prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>16.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python from typing import List, Optional """ 16. 3Sum Closest https://leetcode.com/problems/3sum-closest/ """ def bsearch(nums, left, right, res, i, j, target): while left <= right: middle = (left + right) // 2 candidate = nums[i] + nums[j] + nums[middle] if res is None or abs(candidate - target) < abs(res - target): res = candidate if candidate == target: return res elif candidate > target: right = middle - 1 else: left = middle + 1 return res class Solution: def threeSumClosest(self, nums: List[int], target: int) -> Optional[int]: res = None nums = sorted(nums) for i in range(len(nums)): for j in range(i + 1, len(nums)): res = bsearch(nums, j + 1, len(nums) - 1, res, i, j, target) return res def main(): sol = Solution() print(sol.threeSumClosest([-111, -111, 3, 6, 7, 16, 17, 18, 19], 13)) return 0 <|fim▁hole|> raise SystemExit(main())<|fim▁end|>
if __name__ == '__main__':
<|file_name|>cgnv6_lsn_stun_timeout_udp.py<|end_file_name|><|fim▁begin|>from a10sdk.common.A10BaseClass import A10BaseClass class Udp(A10BaseClass): """Class Description:: Set UDP STUN timeout. Class udp supports CRUD Operations and inherits from `common/A10BaseClass`. This class is the `"PARENT"` class for this module.` :param port_start: {"description": "Port Range (Port Range Start)", "format": "number", "type": "number", "maximum": 65535, "minimum": 1, "optional": false} :param port_end: {"description": "Port Range (Port Range End)", "format": "number", "type": "number", "maximum": 65535, "minimum": 1, "optional": false} :param timeout: {"description": "STUN timeout in minutes (default: 2 minutes)", "format": "number", "type": "number", "maximum": 60, "minimum": 0, "optional": true} :param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"} :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py` URL for this object:: `https://<Hostname|Ip address>//axapi/v3/cgnv6/lsn/stun-timeout/udp/{port_start}+{port_end}`. """ def __init__(self, **kwargs): self.ERROR_MSG = "" self.required = [ "port_start","port_end"] self.b_key = "udp" self.a10_url="/axapi/v3/cgnv6/lsn/stun-timeout/udp/{port_start}+{port_end}" self.DeviceProxy = "" self.port_start = ""<|fim▁hole|> for keys, value in kwargs.items(): setattr(self,keys, value)<|fim▁end|>
self.port_end = "" self.timeout = "" self.uuid = ""
<|file_name|>River Valley Gold Bar_neighbor.js<|end_file_name|><|fim▁begin|>var RiverValleyGoldBar = { "type": "FeatureCollection", "features": [ { "geometry": { "type": "Polygon", "coordinates": [ [ [ -113.40145458936446, 53.55763251534859 ], [ -113.40147257819106, 53.55609385085491 ], [ -113.40148220206095, 53.5547373988822 ], [ -113.40149840209293, 53.55235808892106 ], [ -113.40157454581572, 53.54185260261831 ], [ -113.4015726670091, 53.54136401418185 ], [ -113.40157951530409, 53.54116845136462 ], [ -113.4113764820249, 53.54115044395937 ], [ -113.41137767991789, 53.54137663840556 ], [ -113.41137545983679, 53.54222870856817 ], [ -113.41134635951526, 53.5422446969538 ], [ -113.41134570601099, 53.542245065154475 ], [ -113.4113177796288, 53.54226120672211 ], [ -113.41131682013348, 53.542261768724735 ], [ -113.40923855749763, 53.543494355941384 ], [ -113.40924045495902, 53.542873130236906 ], [ -113.40751756874302, 53.54287118826321 ], [ -113.4073939610766, 53.54390928989409 ], [ -113.4074390528669, 53.54390936523036 ], [ -113.40717703672931, 53.54612452392648 ], [ -113.4071701681723, 53.54784600327692 ], [ -113.4063936263499, 53.54784497102708 ], [ -113.4059611065803, 53.5483342685193 ], [ -113.40496558148585, 53.54946512934348 ], [ -113.4046430728196, 53.55212120116978 ], [ -113.40459411465487, 53.552144395591895 ], [ -113.40457070425023, 53.552335692062385 ], [ -113.40515867129524, 53.55253840784157 ], [ -113.40526498319204, 53.55251247301422 ], [ -113.4053887351933, 53.55245219278488 ], [ -113.40544098037837, 53.552523994663275 ], [ -113.40550380718967, 53.55258620997111 ], [ -113.40557460507013, 53.55264182759415 ], [ -113.40569518389457, 53.55271099874207 ], [ -113.40584082532956, 53.55276641382393 ], [ -113.4057687047319, 53.5528444091035 ], [ -113.40566284244665, 53.55295753890316 ], [ -113.40601535804905, 53.55322784092108 ], [ -113.40804682535301, 53.55331545995036 ], [ -113.40804830624114, 53.552959330160526 ], [ -113.40804826381081, 53.55286415304385 ], [ 
-113.40806893330759, 53.552860407960935 ], [ -113.40814030681949, 53.552843020934716 ], [ -113.4082090967473, 53.552822267494506 ], [ -113.40827485575088, 53.552798281659854 ], [ -113.40833715671872, 53.552771218758245 ], [ -113.40839559397482, 53.55274125543096 ], [ -113.40844978815801, 53.55270858606233 ], [ -113.40849938621673, 53.552673423139176 ], [ -113.40854406626768, 53.55263599511805 ], [ -113.40858353761574, 53.55259654498715 ], [ -113.40861754438008, 53.55255532992468 ], [ -113.40864586494553, 53.55251261734241 ], [ -113.40866831497483, 53.55246868525946 ], [ -113.40868474926893, 53.55242381871716 ], [ -113.40869506056457, 53.55237830941354 ], [ -113.40869918257629, 53.552332453921075 ], [ -113.40869708703814, 53.5522865493587 ], [ -113.40855429532333, 53.55102631400716 ], [ -113.4092089520462, 53.55102728331677 ], [ -113.40920207422913, 53.55243717711483 ], [ -113.41060949677964, 53.55243868716274 ], [ -113.41061261036751, 53.55288759584133 ], [ -113.41429628463207, 53.552886856494254 ], [ -113.41474160210092, 53.552968183419985 ], [ -113.41483805540582, 53.55307745688839 ], [ -113.41477917150824, 53.55322987046482 ], [ -113.41474786532233, 53.55339441725301 ], [ -113.41474325605692, 53.55366616997174 ], [ -113.41469669481673, 53.55366455787115 ], [ -113.41082547658444, 53.553539131440225 ], [ -113.41008466279013, 53.55351498609814 ], [ -113.40922897201314, 53.55373363771611 ], [ -113.40808303202783, 53.55456488852625 ], [ -113.40803400823232, 53.55532295060398 ], [ -113.40798683918338, 53.55535656154832 ], [ -113.40797209867937, 53.555596830461795 ], [ -113.40899996494693, 53.5555981463587 ], [ -113.40975512975673, 53.55667226278233 ], [ -113.40997783852683, 53.556970153157536 ], [ -113.41004773215859, 53.55704808687284 ], [ -113.41018059085997, 53.55717152322305 ], [ -113.41033373278027, 53.55728632715306 ], [ -113.41050549789146, 53.557391370745194 ], [ -113.41069414494412, 53.55748558894332 ], [ -113.41194326312616, 53.55804810433954 ], [ 
-113.41184338539465, 53.55812455181394 ], [ -113.41151704045345, 53.55838142797979 ], [ -113.41268475285106, 53.558908422955625 ], [ -113.4127311298962, 53.558916613637976 ], [ -113.41767556726357, 53.55892126649075 ], [ -113.41772148500118, 53.55894861091774 ], [ -113.41822740040311, 53.55894907995206 ], [ -113.4183824166426, 53.558948796595345 ], [ -113.41838398823381, 53.55859603486254 ], [ -113.4185236754714, 53.55850333221476 ], [ -113.42311387779365, 53.55850792311487 ], [ -113.4231120350707, 53.55903671345003 ], [ -113.42311377957142, 53.55949307360121 ], [ -113.42312314555807, 53.559595606469536 ], [ -113.42316477205863, 53.5596952544038 ], [ -113.42323717015219, 53.55978845280595 ], [ -113.42333774899673, 53.559871866819066 ], [ -113.4234205659073, 53.55991902173548 ], [ -113.42351257318973, 53.55995969200601 ], [ -113.42361234740484, 53.55999324783968 ], [ -113.4237183458569, 53.56001917101982 ], [ -113.42382892885806, 53.560037060403836 ], [ -113.42394238619545, 53.56004663924076 ], [ -113.42405696304007, 53.560047759250885 ], [ -113.42417088707704, 53.560040403632016 ], [ -113.42432449406212, 53.560029154568646 ], [ -113.42464995873662, 53.55999901889762 ], [ -113.4249726478381, 53.559959703980525 ], [ -113.42529183309766, 53.559911299076525 ], [ -113.42560679142188, 53.55985391324675 ], [ -113.42591681091668, 53.55978767646038 ], [ -113.42622118969201, 53.5597127388702 ], [ -113.42651924010391, 53.55962926975414 ], [ -113.42681028754384, 53.559537457868046 ], [ -113.42709002565022, 53.559442581511675 ], [ -113.42730189355326, 53.55936256895412 ], [ -113.42750307256706, 53.55928597904912 ], [ -113.42771075739044, <|fim▁hole|> 53.559198070721216 ], [ -113.42791219006313, 53.55910256598954 ], [ -113.42812643055184, 53.55899955601201 ], [ -113.42833126749987, 53.558887043032904 ], [ -113.42853527371713, 53.55877002124633 ], [ -113.42872434973997, 53.558644649309855 ], [ -113.42879190888785, 53.55859255148087 ], [ -113.42885007422528, 53.5585365799968 ], [ 
-113.42889822498785, 53.55847733253289 ], [ -113.42893584680169, 53.55841544177699 ], [ -113.42896253719258, 53.55835156970438 ], [ -113.42897801235425, 53.558286397904865 ], [ -113.42898210539315, 53.55822062326011 ], [ -113.42897477309162, 53.55815494863109 ], [ -113.42895844490997, 53.55809644107585 ], [ -113.42893304483006, 53.55803909253553 ], [ -113.42889879378238, 53.55798339972801 ], [ -113.42885598893942, 53.55792984571523 ], [ -113.42880500136312, 53.55787889521958 ], [ -113.42874627425088, 53.55783099030284 ], [ -113.42868031573907, 53.55778654673739 ], [ -113.4286076989615, 53.55774594969433 ], [ -113.4285290752362, 53.557709560635715 ], [ -113.4284451081168, 53.55767768357233 ], [ -113.42835652429982, 53.55765059514118 ], [ -113.4282640754412, 53.557628555924865 ], [ -113.42816867434351, 53.55761155985471 ], [ -113.42807103274187, 53.5575999172894 ], [ -113.42797199619473, 53.557593728597176 ], [ -113.42787242295583, 53.557593047842346 ], [ -113.42718739193023, 53.55759101221539 ], [ -113.4272048035954, 53.55708040817243 ], [ -113.42856608818535, 53.5570381110969 ], [ -113.42913867775488, 53.55702499640881 ], [ -113.42955528173047, 53.55700441017494 ], [ -113.42996353753234, 53.556961541697106 ], [ -113.43029354827846, 53.55690709547544 ], [ -113.43066156361061, 53.556824819997274 ], [ -113.43101304527829, 53.55672276120324 ], [ -113.43133965700937, 53.556608963464626 ], [ -113.43171360500932, 53.556438123306165 ], [ -113.43201352743888, 53.556264527051106 ], [ -113.43233375948012, 53.55603596006035 ], [ -113.43299063613843, 53.5555118711233 ], [ -113.43449832084005, 53.55433828625943 ], [ -113.43465426292366, 53.5542097682829 ], [ -113.43480159105131, 53.55408461084986 ], [ -113.43493185096234, 53.55395427042252 ], [ -113.43506503390479, 53.553818841480776 ], [ -113.43519266278727, 53.55367148108142 ], [ -113.43528871784923, 53.55353587655826 ], [ -113.43538490812834, 53.55339006823888 ], [ -113.43548112037807, 53.55324255921771 ], [ 
-113.435554530591, 53.55309154106071 ], [ -113.43561937282311, 53.552940482427104 ], [ -113.43567845772027, 53.55279279804398 ], [ -113.43573185323015, 53.55264338606209 ], [ -113.43577465359837, 53.55249329812321 ], [ -113.435808768845, 53.55233571970756 ], [ -113.43582267080895, 53.55216591213866 ], [ -113.43579108993609, 53.55151004780178 ], [ -113.4358378817371, 53.5493163594302 ], [ -113.43652250884917, 53.54844935855839 ], [ -113.43663707623604, 53.5480837599677 ], [ -113.43664310071908, 53.54742820229822 ], [ -113.43664628750344, 53.54687067916736 ], [ -113.43663152054111, 53.54675130671153 ], [ -113.4365947409905, 53.54662431589607 ], [ -113.43653717747593, 53.54651354164603 ], [ -113.43646369887513, 53.54639059512044 ], [ -113.43636047074072, 53.546281686148205 ], [ -113.43622083688025, 53.54617260547473 ], [ -113.43609162034332, 53.54608952084445 ], [ -113.43425426108746, 53.54505617844497 ], [ -113.43249365642593, 53.54405314443144 ], [ -113.43145342599774, 53.5436044578734 ], [ -113.43112256599915, 53.54349027467095 ], [ -113.43083992683702, 53.54339970500718 ], [ -113.43053493269375, 53.543323198884586 ], [ -113.43029569632553, 53.543270902883194 ], [ -113.42996041953582, 53.543209667520635 ], [ -113.42935187343227, 53.543126233139006 ], [ -113.42935211121139, 53.54232296720524 ], [ -113.43004972649284, 53.542325712216034 ], [ -113.43004999462431, 53.542303434415984 ], [ -113.43010125663255, 53.54230328122999 ], [ -113.43090611551601, 53.54230342964345 ], [ -113.43090654419106, 53.54145823160389 ], [ -113.43090484507599, 53.54118714848598 ], [ -113.4322915904427, 53.54119112268269 ], [ -113.43229032904692, 53.54251332870229 ], [ -113.43225155773791, 53.542655047985356 ], [ -113.43224626587994, 53.54305027323664 ], [ -113.4345061733879, 53.54304945054282 ], [ -113.43450555813108, 53.54391223873677 ], [ -113.43522749031125, 53.54412991928209 ], [ -113.43546616196569, 53.54422078466229 ], [ -113.43618771074793, 53.54472299896044 ], [ -113.43618620631615, 
53.54481441614262 ], [ -113.4361859241964, 53.545082774851565 ], [ -113.43727479900379, 53.545195310320416 ], [ -113.43794714883258, 53.54515618654953 ], [ -113.43912232701285, 53.54568130291414 ], [ -113.43978489614663, 53.54583335650493 ], [ -113.43994225147406, 53.5457746538565 ], [ -113.44006239607421, 53.545832828189226 ], [ -113.4402600368361, 53.5459741820446 ], [ -113.44041495817257, 53.546145941949035 ], [ -113.44052439766826, 53.5462080939516 ], [ -113.44045832203777, 53.546310376250055 ], [ -113.44061490816469, 53.546651467219654 ], [ -113.44112032718732, 53.54663982335041 ], [ -113.44198813331508, 53.54662068463171 ], [ -113.44252064293299, 53.546240609983954 ], [ -113.44274204871567, 53.54608198829306 ], [ -113.44295526693591, 53.54590332698189 ], [ -113.44312598906532, 53.545684828257 ], [ -113.44328502373963, 53.545383748777965 ], [ -113.44333813430927, 53.54495083633403 ], [ -113.44332424417499, 53.54378164970962 ], [ -113.44384671432755, 53.54377366236266 ], [ -113.44456295371455, 53.54377337797912 ], [ -113.44456460157284, 53.54360252632505 ], [ -113.44532171982706, 53.54360288856925 ], [ -113.44532125992232, 53.54320544859143 ], [ -113.44532140380403, 53.54311499755339 ], [ -113.44645247031072, 53.54311535840838 ], [ -113.44645353903947, 53.542124098567506 ], [ -113.44714284891317, 53.54212464844844 ], [ -113.44714326881356, 53.541713898411615 ], [ -113.44720356339408, 53.541678353713586 ], [ -113.44771179134491, 53.541678757903604 ], [ -113.44771263886867, 53.54130897874315 ], [ -113.44773107507243, 53.54121829412014 ], [ -113.44930184688576, 53.54121896731 ], [ -113.44930120336265, 53.54177477857271 ], [ -113.44856268870896, 53.5417740386866 ], [ -113.44817166882946, 53.541850288477704 ], [ -113.44816980910215, 53.54254061859451 ], [ -113.4484685791029, 53.54254085845775 ], [ -113.44787921896886, 53.54289662854705 ], [ -113.44787866987511, 53.54311583846731 ], [ -113.44787661911325, 53.543269028672185 ], [ -113.44772617997903, 53.54328064091499 
], [ -113.44715179923676, 53.54332602102728 ], [ -113.44688978310806, 53.543373524066475 ], [ -113.4466579352461, 53.543444090610016 ], [ -113.44645623049905, 53.54353963109079 ], [ -113.44631207236927, 53.54364063974672 ], [ -113.44614986365782, 53.54379038208054 ], [ -113.44615177103279, 53.54413764456313 ], [ -113.44583135204837, 53.544202040812976 ], [ -113.44542275630039, 53.544326803660496 ], [ -113.44505400026704, 53.54448887354414 ], [ -113.44482472720712, 53.54463848742818 ], [ -113.44464606447372, 53.54484292936418 ], [ -113.44458168114278, 53.54499985955693 ], [ -113.44442447383045, 53.54499944604113 ], [ -113.44383536153133, 53.544999240438486 ], [ -113.44383499142117, 53.54511969502231 ], [ -113.4437430765658, 53.54511966257875 ], [ -113.44373963598802, 53.54623967253286 ], [ -113.44373891409427, 53.54632280819282 ], [ -113.44316524149436, 53.54632103979965 ], [ -113.44299969358734, 53.54643504374567 ], [ -113.44299746909066, 53.54735123965246 ], [ -113.44256722201243, 53.547351905192436 ], [ -113.44120815887999, 53.54808411810149 ], [ -113.44116256508237, 53.54815126727021 ], [ -113.44029322018079, 53.54813968978706 ], [ -113.4406786602197, 53.54842227863307 ], [ -113.44095204979858, 53.5487173787384 ], [ -113.44099047005454, 53.548842188712754 ], [ -113.44117488003249, 53.5489644085447 ], [ -113.44168380006039, 53.549209368483346 ], [ -113.44168223012848, 53.549340801301575 ], [ -113.44152686083673, 53.54934094566663 ], [ -113.44152776021784, 53.549566680566734 ], [ -113.44007597787487, 53.54974629538655 ], [ -113.44007659964772, 53.550136072743165 ], [ -113.43947187446331, 53.550135345589624 ], [ -113.4393797120124, 53.55018998823586 ], [ -113.4393731200631, 53.55198717866815 ], [ -113.43922025995985, 53.552837748494454 ], [ -113.4387863099974, 53.553324958480324 ], [ -113.43877789737613, 53.553335212649834 ], [ -113.43877761150809, 53.55333559964756 ], [ -113.43877732295597, 53.5533359526435 ], [ -113.4387737634591, 53.55334080899607 ], [ 
-113.43877018076016, 53.55334565912339 ], [ -113.4387699304119, 53.553346038563426 ], [ -113.43876965654233, 53.55334641221378 ], [ -113.4387664263044, 53.553351349560224 ], [ -113.43876317251853, 53.55335628115713 ], [ -113.43876295708544, 53.55335665218363 ], [ -113.43876269934307, 53.55335704613703 ], [ -113.43875646250802, 53.5533678373732 ], [ -113.43875283821374, 53.55337484962114 ], [ -113.43875264288484, 53.553375265306585 ], [ -113.4387524542496, 53.55337563214115 ], [ -113.43875099385423, 53.55337877465912 ], [ -113.43874951817608, 53.55338191509477 ], [ -113.4387493507254, 53.55338231038763 ], [ -113.43874916815784, 53.55338270324138 ], [ -113.43874784407406, 53.55338586706792 ], [ -113.43874650459647, 53.55338902910913 ], [ -113.43874636402252, 53.553389403571096 ], [ -113.43874618875623, 53.553389822360366 ], [ -113.43874486208998, 53.55339340442994 ], [ -113.43874351802219, 53.5533969847673 ], [ -113.4387433791538, 53.5533974084364 ], [ -113.43874324040195, 53.55339778307379 ], [ -113.43874220684054, 53.5534009850094 ], [ -113.43874115771753, 53.55340418574525 ], [ -113.43874105150222, 53.55340456420562 ], [ -113.43874091469041, 53.553404988043475 ], [ -113.43873890113852, 53.55341222626035 ], [ -113.43873703158343, 53.55342030190397 ], [ -113.4387356811584, 53.55342759456706 ], [ -113.43873455089586, 53.55343571875559 ], [ -113.43873386758636, 53.55344304432987 ], [ -113.43873350110405, 53.55345037794971 ], [ -113.43873346577737, 53.55345852994337 ], [ -113.43873376869675, 53.553465864606025 ], [ -113.43873380410736, 53.553466271653754 ], [ -113.43873382191765, 53.55346667919087 ], [ -113.43873447691912, 53.55347400569622 ], [ -113.43873544834314, 53.5534813197225 ], [ -113.4387368980284, 53.55348942590622 ], [ -113.43873853509194, 53.55349669772022 ], [ -113.43874048589795, 53.55350394205442 ], [ -113.4387406190247, 53.553504366273835 ], [ -113.43874072195375, 53.553504745037074 ], [ -113.4387417433504, 53.55350794903219 ], [ -113.43874274914319, 
53.553511154075174 ], [ -113.43874288463614, 53.55351152910425 ], [ -113.43874301982436, 53.55351195317294 ], [ -113.43874562846877, 53.553519123724115 ], [ -113.43874888932444, 53.55352704110349 ], [ -113.43875214809947, 53.55353411668619 ], [ -113.43875571159377, 53.55354113998307 ], [ -113.43876002554066, 53.55354887666565 ], [ -113.43876422433557, 53.553555774348254 ], [ -113.43876923754044, 53.55356336009282 ], [ -113.4387740592424, 53.55357011180001 ], [ -113.43877975690732, 53.55357752421452 ], [ -113.43878518726376, 53.55358410999266 ], [ -113.43878550559525, 53.553584470927944 ], [ -113.43878580820581, 53.553584836602454 ], [ -113.43879155259087, 53.55359132721992 ], [ -113.43879757561268, 53.553597727675196 ], [ -113.4388045897726, 53.55360472836403 ], [ -113.43785294412312, 53.553946176714156 ], [ -113.43799809266785, 53.55438003810364 ], [ -113.43824032102417, 53.55502138779654 ], [ -113.43838893422378, 53.555357127517546 ], [ -113.43857619567999, 53.5556994436934 ], [ -113.43646866874889, 53.55646264871205 ], [ -113.434875380175, 53.55715312460406 ], [ -113.4334718724598, 53.55786334299341 ], [ -113.42988271490518, 53.5597532220297 ], [ -113.42644631151879, 53.56152641133273 ], [ -113.42547987307334, 53.561956587278615 ], [ -113.42355259441509, 53.56255628707022 ], [ -113.42145694493216, 53.563031348725836 ], [ -113.41920372219816, 53.56328453517151 ], [ -113.41755455994118, 53.56338970460361 ], [ -113.41543193672781, 53.563360485792856 ], [ -113.41378589234657, 53.56323924240979 ], [ -113.41217337286302, 53.5629860883487 ], [ -113.41034877580833, 53.56252256525821 ], [ -113.40788219683526, 53.56168212536205 ], [ -113.4067165059717, 53.56120473218665 ], [ -113.40536286104106, 53.56057547579939 ], [ -113.40416793232349, 53.559928129945405 ], [ -113.40322702451805, 53.55924431021345 ], [ -113.40219323102296, 53.558409101117704 ], [ -113.40145458936446, 53.55763251534859 ] ] ] }, "type": "Feature", "properties": { "name": "River Valley Gold Bar", 
"number": "6620", "coordinate": "-1.13418e+002", "coords": "53.5535 , -113.418", "otherCoord": "5.35535e+001", "area_km2": "2.07069819171585" } }, { "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [ -113.43877975690732, 53.55357752421452 ], [ -113.43878550559525, 53.553584470927944 ], [ -113.43878518726376, 53.55358410999266 ], [ -113.43877975690732, 53.55357752421452 ] ] ], [ [ [ -113.43874288463614, 53.55351152910425 ], [ -113.43874562846877, 53.553519123724115 ], [ -113.43874301982436, 53.55351195317294 ], [ -113.43874288463614, 53.55351152910425 ] ] ], [ [ [ -113.4387406190247, 53.553504366273835 ], [ -113.4387417433504, 53.55350794903219 ], [ -113.43874072195375, 53.553504745037074 ], [ -113.4387406190247, 53.553504366273835 ] ] ], [ [ [ -113.43873380410736, 53.553466271653754 ], [ -113.43873447691912, 53.55347400569622 ], [ -113.43873382191765, 53.55346667919087 ], [ -113.43873380410736, 53.553466271653754 ] ] ], [ [ [ -113.43874220684054, 53.5534009850094 ], [ -113.43874105150222, 53.55340456420562 ], [ -113.43874115771753, 53.55340418574525 ], [ -113.43874220684054, 53.5534009850094 ] ] ], [ [ [ -113.43874486208998, 53.55339340442994 ], [ -113.4387433791538, 53.5533974084364 ], [ -113.43874351802219, 53.5533969847673 ], [ -113.43874486208998, 53.55339340442994 ] ] ], [ [ [ -113.43874784407406, 53.55338586706792 ], [ -113.43874636402252, 53.553389403571096 ], [ -113.43874650459647, 53.55338902910913 ], [ -113.43874784407406, 53.55338586706792 ] ] ], [ [ [ -113.43875099385423, 53.55337877465912 ], [ -113.4387493507254, 53.55338231038763 ], [ -113.43874951817608, 53.55338191509477 ], [ -113.43875099385423, 53.55337877465912 ] ] ], [ [ [ -113.43875646250802, 53.5533678373732 ], [ -113.43875264288484, 53.553375265306585 ], [ -113.43875283821374, 53.55337484962114 ], [ -113.43875646250802, 53.5533678373732 ] ] ], [ [ [ -113.43876295708544, 53.55335665218363 ], [ -113.43875646250802, 53.5533678373732 ], [ -113.43876269934307, 53.55335704613703 ], [ 
-113.43876295708544, 53.55335665218363 ] ] ], [ [ [ -113.4387699304119, 53.553346038563426 ], [ -113.4387664263044, 53.553351349560224 ], [ -113.43876965654233, 53.55334641221378 ], [ -113.4387699304119, 53.553346038563426 ] ] ], [ [ [ -113.43877761150809, 53.55333559964756 ], [ -113.4387737634591, 53.55334080899607 ], [ -113.43877732295597, 53.5533359526435 ], [ -113.43877761150809, 53.55333559964756 ] ] ] ] }, "type": "Feature", "properties": { "name": "River Valley Gold Bar", "number": "6620", "coordinate": "-1.13418e+002", "coords": "53.5535 , -113.418", "otherCoord": "5.35535e+001", "area_km2": "2.07069819171585" } }, { "geometry": { "type": "MultiPolygon", "coordinates": [ [ [ [ -113.43877975690732, 53.55357752421452 ], [ -113.43878550559525, 53.553584470927944 ], [ -113.43878518726376, 53.55358410999266 ], [ -113.43877975690732, 53.55357752421452 ] ] ], [ [ [ -113.43874288463614, 53.55351152910425 ], [ -113.43874562846877, 53.553519123724115 ], [ -113.43874301982436, 53.55351195317294 ], [ -113.43874288463614, 53.55351152910425 ] ] ], [ [ [ -113.4387406190247, 53.553504366273835 ], [ -113.4387417433504, 53.55350794903219 ], [ -113.43874072195375, 53.553504745037074 ], [ -113.4387406190247, 53.553504366273835 ] ] ], [ [ [ -113.43873380410736, 53.553466271653754 ], [ -113.43873447691912, 53.55347400569622 ], [ -113.43873382191765, 53.55346667919087 ], [ -113.43873380410736, 53.553466271653754 ] ] ], [ [ [ -113.43874220684054, 53.5534009850094 ], [ -113.43874105150222, 53.55340456420562 ], [ -113.43874115771753, 53.55340418574525 ], [ -113.43874220684054, 53.5534009850094 ] ] ], [ [ [ -113.43874486208998, 53.55339340442994 ], [ -113.4387433791538, 53.5533974084364 ], [ -113.43874351802219, 53.5533969847673 ], [ -113.43874486208998, 53.55339340442994 ] ] ], [ [ [ -113.43874784407406, 53.55338586706792 ], [ -113.43874636402252, 53.553389403571096 ], [ -113.43874650459647, 53.55338902910913 ], [ -113.43874784407406, 53.55338586706792 ] ] ], [ [ [ 
-113.43875099385423, 53.55337877465912 ], [ -113.4387493507254, 53.55338231038763 ], [ -113.43874951817608, 53.55338191509477 ], [ -113.43875099385423, 53.55337877465912 ] ] ], [ [ [ -113.43875646250802, 53.5533678373732 ], [ -113.43875264288484, 53.553375265306585 ], [ -113.43875283821374, 53.55337484962114 ], [ -113.43875646250802, 53.5533678373732 ] ] ], [ [ [ -113.43876295708544, 53.55335665218363 ], [ -113.43875646250802, 53.5533678373732 ], [ -113.43876269934307, 53.55335704613703 ], [ -113.43876295708544, 53.55335665218363 ] ] ], [ [ [ -113.4387699304119, 53.553346038563426 ], [ -113.4387664263044, 53.553351349560224 ], [ -113.43876965654233, 53.55334641221378 ], [ -113.4387699304119, 53.553346038563426 ] ] ], [ [ [ -113.43877761150809, 53.55333559964756 ], [ -113.4387737634591, 53.55334080899607 ], [ -113.43877732295597, 53.5533359526435 ], [ -113.43877761150809, 53.55333559964756 ] ] ] ] }, "type": "Feature", "properties": { "name": "River Valley Riverside", "number": "6630", "coordinate": "-1.13458e+002", "coords": "53.5506 , -113.458", "otherCoord": "5.35506e+001", "area_km2": "1.24936027203016" } } ] };<|fim▁end|>
<|file_name|>column.cpp<|end_file_name|><|fim▁begin|>/* * Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|> * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "data/constructs/column.h" #include <string> #include "data/constructs/inputvalidation.h" namespace cclient { namespace data { void Column::setColFamily(const char *r, uint32_t size) { columnFamily = std::string(r, size); } void Column::setColQualifier(const char *r, uint32_t size) { columnQualifier = std::string(r, size); } void Column::setColVisibility(const char *r, uint32_t size) { columnVisibility = std::string(r, size); } Column::~Column() {} bool Column::operator<(const Column &rhs) const { int compare = compareBytes(columnFamily.data(), 0, columnFamily.size(), rhs.columnFamily.data(), 0, rhs.columnFamily.size()); if (compare < 0) return true; else if (compare > 0) return false; ; compare = compareBytes(columnQualifier.data(), 0, columnQualifier.size(), rhs.columnQualifier.data(), 0, rhs.columnQualifier.size()); if (compare < 0) return true; else if (compare > 0) return false; compare = compareBytes(columnVisibility.data(), 0, columnVisibility.size(), rhs.columnVisibility.data(), 0, rhs.columnVisibility.size()); if (compare < 0) return true; return false; } bool Column::operator==(const Column &rhs) const { int compare = compareBytes(columnFamily.data(), 0, columnFamily.size(), rhs.columnFamily.data(), 0, rhs.columnFamily.size()); if (compare != 0) return false; compare = compareBytes(columnQualifier.data(), 0, columnQualifier.size(), rhs.columnQualifier.data(), 0, rhs.columnQualifier.size()); if (compare != 0) return false; compare = 
compareBytes(columnVisibility.data(), 0, columnVisibility.size(), rhs.columnVisibility.data(), 0, rhs.columnVisibility.size()); if (compare != 0) return false; else return true; } uint64_t Column::write(cclient::data::streams::OutputStream *outStream) { if (columnFamily.empty()) { outStream->writeBoolean(false); } else { outStream->writeBoolean(true); outStream->writeBytes(columnFamily.data(), columnFamily.size()); } if (columnQualifier.empty()) { outStream->writeBoolean(false); } else { outStream->writeBoolean(true); outStream->writeBytes(columnQualifier.data(), columnQualifier.size()); } if (columnVisibility.empty()) { return outStream->writeBoolean(false); } else { outStream->writeBoolean(true); return outStream->writeBytes(columnVisibility.data(), columnVisibility.size()); } } } // namespace data } // namespace cclient<|fim▁end|>
* you may not use this file except in compliance with the License. * You may obtain a copy of the License at
<|file_name|>kir.py<|end_file_name|><|fim▁begin|>''' *** Ki Renamer *** usage: kir <filter> <rule> [-p] [-r] <filter> Regex - Filter the files to rename <rule> Renaming rule Options: --version Displays the current version of Ki Renamer -h, --help Show usage and options -p, --preview Print more informations -r, --recursive Also rename files in sub-directories recursively Use a regular expression to select the files to rename in the working directory; ''' import os import re from docopt import docopt __VERSION__ = "0.1" # args = docopt(__doc__, version=__VERSION__, options_first=False) ### for debugging purpose preview = True working_dir = r".\test" kir_filter = "^(.*)(?i:File)(.*)$" kir_rule = "\\1abc\\2" recursive = False ### <|fim▁hole|>### future # preview = args["--preview"] # recursive = args["--recursive"] # kir_filter = args["<filter>"] # kir_rule = args["<rule>"] # working_dir = os.getcwd() ### def _ren(old_name, new_name): if preview: print("{} > {}".format(old_name, new_name)) else: return os.rename( os.path.join(working_dir, old_name), os.path.join(working_dir, new_name) ) if __name__ == '__main__': regex = re.compile( kir_filter ) # build the 'to be renamed' list to_rename = [] files_count = 0 for root, dirs, files in os.walk(working_dir, onerror=None, followlinks=False): for name in files: files_count += 1 new_name = regex.sub(kir_rule, name) if name != new_name : to_rename.append( (name, new_name ) ) for name in dirs: pass if not recursive: break # control the result print("{}/{} Files to rename".format( len(to_rename), files_count)) # apply rename (or preview operation) for filename, newname in to_rename: _ren(filename, newname) print("> done")<|fim▁end|>
<|file_name|>show-trial-signup.js<|end_file_name|><|fim▁begin|>var utils = require('../../lib/utils'); // if they agree to the ULA, notify hubspot, create a trial and send verification link module.exports = function trialSignup(request, reply) { var postToHubspot = request.server.methods.npme.sendData, getCustomer = request.server.methods.npme.getCustomer; var opts = {}; var data = { hs_context: { pageName: "enterprise-trial-signup", ipAddress: utils.getUserIP(request) }, // we can trust the email is fine because we've verified it in the show-ula handler email: request.payload.customer_email, }; postToHubspot(process.env.HUBSPOT_FORM_NPME_AGREED_ULA, data, function(er) { if (er) { request.logger.error('Could not hit ULA notification form on Hubspot'); request.logger.error(er); reply.view('errors/internal', opts).code(500); return; } getCustomer(data.email, function(err, customer) { if (err) { request.logger.error('Unknown problem with customer record'); request.logger.error(err); reply.view('errors/internal', opts).code(500); return; } if (!customer) { request.logger.error('Unable to locate customer error ' + data.email); reply.view('errors/internal', opts).code(500); return; } if (customer && customer.id + '' === request.payload.customer_id + '') { return createTrialAccount(request, reply, customer); } request.logger.error('Unable to verify customer record ', data.email); reply.view('errors/internal', opts).code(500); }); }); }; function createTrialAccount(request, reply, customer) { var createTrial = request.server.methods.npme.createTrial; var opts = {};<|fim▁hole|> createTrial(customer, function(er, trial) { if (er) { request.logger.error('There was an error with creating a trial for ', customer.id); request.logger.error(er); reply.view('errors/internal', opts).code(500); return; } return sendVerificationEmail(request, reply, customer, trial); }); } function sendVerificationEmail(request, reply, customer, trial) { var opts = {}; var sendEmail = 
request.server.methods.email.send; var user = { name: customer.name, email: customer.email, verification_key: trial.verification_key }; sendEmail('npme-trial-verification', user, request.redis) .catch(function(er) { request.logger.error('Unable to send verification email to ', customer); request.logger.error(er); reply.view('errors/internal', opts).code(500); return; }) .then(function() { return reply.view('enterprise/thanks', opts); }); }<|fim▁end|>
<|file_name|>measured.cpp<|end_file_name|><|fim▁begin|>/* pbrt source code Copyright(c) 1998-2012 Matt Pharr and Greg Humphreys. This file is part of pbrt. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ // materials/measured.cpp* #include "stdafx.h" #include "materials/measured.h" #include "paramset.h" #include "floatfile.h" /* File format descriptions: -- Irregularly Sampled Isotropic BRDF -- This is the format of the BRDFs in the scenes/brdfs/ folder of the pbrt distribution. This is a simple text format of numbers in a particular format; the hash character # is used to denote a comment that continues to the end of the current line. The first number in the file gives the number of wavelengths at which the reflection data was measured, numWls. 
This is followed by numWls values that give the frequency in nm of these wavelengths. Each BRDF measurement is represented by 4+numWls values. The first two give the (theta,phi) angles of the incident illumination direction, the next two give (theta,phi) for the measured reflection direction, and the following numWls give the spectral coefficients for the measurement at the wavelength specified at the start of the file. -- Regular Half-Angle BRDF -- This is the file format used in the MERL BRDF database; see http://merl.com/brdf. This file format is a binary format, with numbers encoded in low-endian form. It represents a regular 3D tabularization of BRDF samples in RGB color where the dimensions indexed over are (delta phi, delta theta, sqrt(theta_h)). Here, theta_h is the angle between the halfangle vector<|fim▁hole|> The starts with three 32-bit integers, giving the resolution of the overall table. It then containes a number of samples equal to the product of those three integers, times 3 (for RGB). Samples are laid out with delta phi the minor index, then delta theta, then sqrt(theta_h) as the major index. In the file each sample should be scaled by RGB(1500,1500,1500/1.6) of the original measurement. (In order words, the sample values are scaled by the inverse of that as they are read in. */ // MeasuredMaterial Method Definitions static map<string, float *> loadedRegularHalfangle; static map<string, KdTree<IrregIsotropicBRDFSample> *> loadedThetaPhi; MeasuredMaterial::MeasuredMaterial(const string &filename, Reference<Texture<float> > bump) { bumpMap = bump; const char *suffix = strrchr(filename.c_str(), '.'); regularHalfangleData = NULL; thetaPhiData = NULL; if (!suffix) Error("No suffix in measured BRDF filename \"%s\". 
" "Can't determine file type (.brdf / .merl)", filename.c_str()); else if (!strcmp(suffix, ".brdf") || !strcmp(suffix, ".BRDF")) { // Load $(\theta, \phi)$ measured BRDF data if (loadedThetaPhi.find(filename) != loadedThetaPhi.end()) { thetaPhiData = loadedThetaPhi[filename]; return; } vector<float> values; if (!ReadFloatFile(filename.c_str(), &values)) { Error("Unable to read BRDF data from file \"%s\"", filename.c_str()); return; } uint32_t pos = 0; int numWls = int(values[pos++]); if ((values.size() - 1 - numWls) % (4 + numWls) != 0) { Error("Excess or insufficient data in theta, phi BRDF file \"%s\"", filename.c_str()); return; } vector<float> wls; for (int i = 0; i < numWls; ++i) wls.push_back(values[pos++]); BBox bbox; vector<IrregIsotropicBRDFSample> samples; while (pos < values.size()) { float thetai = values[pos++]; float phii = values[pos++]; float thetao = values[pos++]; float phio = values[pos++]; Vector wo = SphericalDirection(sinf(thetao), cosf(thetao), phio); Vector wi = SphericalDirection(sinf(thetai), cosf(thetai), phii); Spectrum s = Spectrum::FromSampled(&wls[0], &values[pos], numWls); pos += numWls; Point p = BRDFRemap(wo, wi); samples.push_back(IrregIsotropicBRDFSample(p, s)); bbox = Union(bbox, p); } loadedThetaPhi[filename] = thetaPhiData = new KdTree<IrregIsotropicBRDFSample>(samples); } else { // Load RegularHalfangle BRDF Data nThetaH = 90; nThetaD = 90; nPhiD = 180; if (loadedRegularHalfangle.find(filename) != loadedRegularHalfangle.end()) { regularHalfangleData = loadedRegularHalfangle[filename]; return; } FILE *f = fopen(filename.c_str(), "rb"); if (!f) { Error("Unable to open BRDF data file \"%s\"", filename.c_str()); return; } int dims[3]; if (fread(dims, sizeof(int), 3, f) != 3) { Error("Premature end-of-file in measured BRDF data file \"%s\"", filename.c_str()); fclose(f); return; } uint32_t n = dims[0] * dims[1] * dims[2]; if (n != nThetaH * nThetaD * nPhiD) { Error("Dimensions don't match\n"); fclose(f); return; } 
regularHalfangleData = new float[3*n]; const uint32_t chunkSize = 2*nPhiD; double *tmp = ALLOCA(double, chunkSize); uint32_t nChunks = n / chunkSize; Assert((n % chunkSize) == 0); float scales[3] = { 1.f/1500.f, 1.15f/1500.f, 1.66f/1500.f }; for (int c = 0; c < 3; ++c) { int offset = 0; for (uint32_t i = 0; i < nChunks; ++i) { if (fread(tmp, sizeof(double), chunkSize, f) != chunkSize) { Error("Premature end-of-file in measured BRDF data file \"%s\"", filename.c_str()); delete[] regularHalfangleData; regularHalfangleData = NULL; fclose(f); return; } for (uint32_t j = 0; j < chunkSize; ++j){ regularHalfangleData[3 * offset++ + c] = max(0., tmp[j] * scales[c]); } } } loadedRegularHalfangle[filename] = regularHalfangleData; fclose(f); } } BSDF *MeasuredMaterial::GetBSDF(const DifferentialGeometry &dgGeom, const DifferentialGeometry &dgShading, MemoryArena &arena) const { // Allocate _BSDF_, possibly doing bump mapping with _bumpMap_ DifferentialGeometry dgs; if (bumpMap) Bump(bumpMap, dgGeom, dgShading, &dgs); else dgs = dgShading; BSDF *bsdf = BSDF_ALLOC(arena, BSDF)(dgs, dgGeom.nn); if (regularHalfangleData) bsdf->Add(BSDF_ALLOC(arena, RegularHalfangleBRDF) (regularHalfangleData, nThetaH, nThetaD, nPhiD)); else if (thetaPhiData) bsdf->Add(BSDF_ALLOC(arena, IrregIsotropicBRDF)(thetaPhiData)); return bsdf; } MeasuredMaterial *CreateMeasuredMaterial(const Transform &xform, const TextureParams &mp) { Reference<Texture<float> > bumpMap = mp.GetFloatTextureOrNull("bumpmap"); return new MeasuredMaterial(mp.FindFilename("filename"), bumpMap); }<|fim▁end|>
and the normal, and delta theta and delta phi are the offset in theta and phi of one of the two directions. (Note that the offset would be the same for the other direction, since it's from the half-angle vector.)
<|file_name|>proto.py<|end_file_name|><|fim▁begin|>import contextlib from uuss.server import model from lolapps.common import uums from lolapps.util import json import os import simplejson import struct import time try: # Try importing the C++ extension version import uuss_pb except: # The dynamic python version will automatically be used pass from uuss.uuss_pb2 import * from uuss.server import model from lolapps.common import uums from lolapps.util.adapters import chunking from lolapps.util import lolsocket import logging log = logging.getLogger(__name__) class UUSSProtocolException(Exception): pass class UUSSShardDownException(UUSSProtocolException): pass class UUSSFailHealthcheckException(UUSSProtocolException): pass class UUSSAction(object): """ Base class for UUSS actions. Note: the get_response and _call methods are set as @contextlib.contextmanager in order for a get-with-lock to be able to use the with userstate.open mechanism to ensure that the lock is released at the appropriate time. 
""" def __init__(self): self.Request = None<|fim▁hole|> self.Response = None @contextlib.contextmanager def get_response(self, protocol, req, config): log.debug("UUSSAction.get_response start (%r, %r)", req.user_id, req.game) userstate = getattr(model, req.game).userstate log.debug("UUSSAction.get_response userstate: %r", userstate) with self._call(protocol, userstate, req, config) as resp: assert req.user_id == resp.user_id assert req.game == resp.game log.debug("UUSSAction.get_response pre-yield (%r, %r)", req.user_id, req.game) yield resp log.debug("UUSSAction.get_response post-yield (%r, %r)", req.user_id, req.game) log.debug("UUSSAction.get_response end (%r, %r)", req.user_id, req.game) @contextlib.contextmanager def _call(self, protocol, userstate, req, config): raise Exception('Implement me!') ## UUSS (UserState) Protocol ## class Get(UUSSAction): def __init__(self): self.Request = GetRequest self.Response = GetResponse @contextlib.contextmanager def _call(self, protocol, userstate, req, config): log.debug("Get._call start (%r, %r)", req.user_id, req.game) if req.lock: # If a lock is requested we need to keep control within this 'with' block # until we receive a ReleaseLock message but we also need to "return" # the userstate requested. This is why this method and UUSSAction.get_response # are contextmanagers. 'yield' allows us to return the value without # leaving the 'with' block. 
with userstate.open( req.user_id, create_if_missing=req.create_if_missing, lock_timeout=req.lock_timeout, max_wait=req.lock_max_wait, label=req.lock_label, raw=True ) as (state, chunked): log.debug("Get._call pre-yield (%r, %r)", req.user_id, req.game) yield self._build_response(state, chunked, req.game, req.user_id) log.debug("Get._call post-yield (%r, %r)", req.user_id, req.game) # we require a ReleaseLock message before we can leave this context and release the lock self._wait_for_release_lock(protocol, req, config) else: (state, chunked) = userstate.get(req.user_id, req.create_if_missing, raw=True) log.debug("Get._call pre-yield (%r, %r)", req.user_id, req.game) yield self._build_response(state, chunked, req.game, req.user_id) log.debug("Get._call post-yield (%r, %r)", req.user_id, req.game) log.debug("Get._call end (%r, %r)", req.user_id, req.game) def _wait_for_release_lock(self, protocol, get_req, config): """ Same as the normal server loop except that we will break once we get and process a ReleaseLock message. 
@see server.connection.ConnectionHandler.run """ log.debug("Get._wait_for_release_lock start (%r, %r)", get_req.user_id, get_req.game) while True: log.debug("Get._wait_for_release_lock loop (%r, %r)", get_req.user_id, get_req.game) (version, req) = protocol.recv_message() if req.__class__ is ReleaseLock: if req.game != get_req.game: raise UUSSProtocolException("ReleaseLock.game (%r) != GetRequest.game (%r)" % (req.game, get_req.game)) if req.user_id != get_req.user_id: raise UUSSProtocolException("ReleaseLock.user_id (%r) != GetRequest.user_id (%r)" % (req.user_id, get_req.user_id)) with get_processor_for_message(req, version).get_response(protocol, req, config) as resp: protocol.send_message(resp, version) if req.__class__ is ReleaseLock: log.debug("Get._wait_for_release_lock end (%r, %r)", get_req.user_id, req.game) return def _build_response(self, state, chunked, game, user_id): if not chunked: # get the state in a chunked format for sending along the wire # there will be only a master chunk with no chunk config specified state = chunking.blow_chunks(state) resp = self.Response() resp.game = game resp.user_id = user_id if state is None: resp.state = "" else: resp.state = state return resp class Save(UUSSAction): def __init__(self): self.Request = SaveRequest self.Response = SaveResponse @contextlib.contextmanager def _call(self, protocol, userstate, req, config): log.debug("Save._call start (%r, %r)", req.user_id, req.game) userstate.save(req.user_id, req.state) resp = self.Response() resp.game = req.game resp.user_id = req.user_id yield resp log.debug("Save._call end (%r, %r)", req.user_id, req.game) class Lock(UUSSAction): def __init__(self): self.Request = ReleaseLock self.Response = LockReleased @contextlib.contextmanager def _call(self, protocol, userstate, req, config): log.debug("Lock._call start (%r, %r)", req.user_id, req.game) resp = self.Response() resp.game = req.game resp.user_id = req.user_id log.debug("Lock._call pre-yield (%r, %r)", req.user_id, 
req.game) yield resp log.debug("Lock._call post-yield (%r, %r)", req.user_id, req.game) log.debug("Lock._call end (%r, %r)", req.user_id, req.game) class Delete(UUSSAction): def __init__(self): self.Request = DeleteRequest self.Response = DeleteResponse @contextlib.contextmanager def _call(self, protocol, userstate, req, config): if userstate.is_remote: raise UUSSProtocolException( "DeleteRequest sent for user_id %r game %r but that game is remote. I will only delete userstates in my local games." % (req.user_id, req.game)) with userstate.open(req.user_id, label='UUSS.Delete') as state: log.warn("[w:delete_userstate] Deleting userstate for user_id %r game %r, state follows\n%s", req.user_id, req.game, json.dumps(state)) userstate.delete(req.user_id) resp = self.Response() resp.game = req.game resp.user_id = req.user_id yield resp ## UUMS protocol ## class GetMessages(UUSSAction): def __init__(self): self.Request = GetMessagesRequest self.Response = GetMessagesResponse @contextlib.contextmanager def _call(self, protocol, userstate, req, config): log.debug("GetMessages._call start (%r, %r)", req.user_id, req.game) resp = self.Response() resp.game = req.game resp.user_id = req.user_id resp.messages.extend([ simplejson.dumps(m) for m in userstate.get_messages(req.user_id) ]) log.debug("GetMessages._call pre-yield (%r, %r)", req.user_id, req.game) yield resp log.debug("GetMessages._call post-yield (%r, %r)", req.user_id, req.game) log.debug("GetMessages._call end (%r, %r)", req.user_id, req.game) class SendMessage(UUSSAction): def __init__(self): self.Request = SendMessageRequest self.Response = SendMessageResponse @contextlib.contextmanager def _call(self, protocol, userstate, req, config): log.debug("SendMessage._call start (%r, %r)", req.user_id, req.game) if config.get('uums.send_message_type', 'direct') == 'mq': log.debug("Using mq") if not req.message_id: req.message_id = uums.new_message_id() msg_id = req.message_id model.mq.send(model.MQ_UUSS, 
''.join(UUSSProtocol._encode_message(req))) else: send_message = userstate.send_message if hasattr(userstate, 'send_message') else userstate.send_message_from msg_id = send_message( req.source_game, req.source_user_id, req.user_id, simplejson.loads(req.message), req.priority) resp = self.Response() resp.game = req.game resp.user_id = req.user_id resp.message_id = msg_id log.debug("SendMessage._call pre-yield (%r, %r)", req.user_id, req.game) yield resp log.debug("SendMessage._call post-yield (%r, %r)", req.user_id, req.game) log.debug("SendMessage._call end (%r, %r)", req.user_id, req.game) class RemoveMessages(UUSSAction): def __init__(self): self.Request = RemoveMessagesRequest self.Response = RemoveMessagesResponse @contextlib.contextmanager def _call(self, protocol, userstate, req, config): log.debug("RemoveMessages._call start (%r, %r)", req.user_id, req.game) userstate.remove_messages(req.user_id, req.message_ids) resp = self.Response() resp.game = req.game resp.user_id = req.user_id log.debug("RemoveMessages._call pre-yield (%r, %r)", req.user_id, req.game) yield resp log.debug("RemoveMessages._call post-yield (%r, %r)", req.user_id, req.game) log.debug("RemoveMessages._call end (%r, %r)", req.user_id, req.game) ## Ping! 
class PingPong(UUSSAction): def __init__(self): self.Request = Ping self.Response = Pong @contextlib.contextmanager def get_response(self, protocol, req, config): log.debug("Ping._call start (%r)", req.counter) resp = self.Response() resp.counter = req.counter log.debug("Ping._call pre-yield (%r)", req.counter) if protocol.fail_healthcheck: raise UUSSFailHealthcheckException("Failing healthcheck") yield resp log.debug("Ping._call post-yield (%r)", req.counter) log.debug("Ping._call end (%r)", req.counter) MSG_PROCESSORS = { 3: [ Get(), Save(), Lock(), Delete(), GetMessages(), SendMessage(), RemoveMessages(), PingPong() ], 2: [ Get(), Save(), Lock(), GetMessages(), SendMessage(), RemoveMessages(), PingPong() ] } MSG_TYPES = dict( [(version, ([t.Request for t in processors] + [t.Response for t in processors] + [ExceptionResponse])) for version, processors in MSG_PROCESSORS.iteritems()] ) MSG_TYPES_LOOKUP = dict( [(version, dict(zip(msg_types, range(len(msg_types))))) for version, msg_types in MSG_TYPES.iteritems()] ) MSG_TYPES_PROCESSOR_LOOKUP = dict( [(version, dict([(p.Request, p) for p in processors] + [(p.Response, p) for p in processors])) for version, processors in MSG_PROCESSORS.iteritems()] ) log.debug("MSG_TYPES: %r", MSG_TYPES) log.debug("MSG_TYPES_LOOKUP: %r", MSG_TYPES_LOOKUP) log.debug("MSG_TYPES_PROCESSOR_LOOKUP: %r", MSG_TYPES_PROCESSOR_LOOKUP) VERSION_HEADER_FORMAT = '!H' VERSION_HEADER_LENGTH = struct.calcsize(VERSION_HEADER_FORMAT) # The first value must always be H and be the version HEADER_FORMAT = { # Version, Message type, Data length 2: '!BL', # Version, Message type, Data length 3: '!BL' } HEADER_LENGTH = dict( [(version, struct.calcsize(header_format)) for version, header_format in HEADER_FORMAT.iteritems()] ) VERSION = sorted(HEADER_FORMAT.keys())[-1] VERSIONS = HEADER_FORMAT.keys() def get_processor_for_message(msg, version=VERSION): return MSG_TYPES_PROCESSOR_LOOKUP[version][msg.__class__] class UUSSProtocol(object): def __init__(self, 
socket, config=None): if isinstance(socket, lolsocket.LolSocket): self.socket = socket else: self.socket = lolsocket.LolSocket(socket) self.config = config or {} self.fail_healthcheck = False @staticmethod def _parse_version_header(data): (version,) = struct.unpack(VERSION_HEADER_FORMAT, data[:VERSION_HEADER_LENGTH]) log.debug("Received message version: %r", version) if version not in VERSIONS: raise UUSSProtocolException("Message version received is %r, we expected one of %r" % (version, VERSIONS)) remaining_data = data[VERSION_HEADER_LENGTH:] return (version, remaining_data) @staticmethod def _parse_message_header(data, version): header_length = HEADER_LENGTH[version] (msg_type, msg_len) = struct.unpack(HEADER_FORMAT[version], data[:header_length]) log.debug("Received message header: %r, %r", msg_type, msg_len) try: msg_class = MSG_TYPES[version][msg_type] except IndexError, e: raise UUSSProtocolException("Invalid message type received: %r" % msg_type) msg_data = data[header_length:] if len(msg_data) != 0 and len(msg_data) != msg_len: raise UUSSProtocolException("Length of data (%r) does not match the length set in the header (%r)" % (len(msg_data), msg_len)) return (msg_type, msg_len, msg_data) @staticmethod def _parse_message_body(msg_type, data, version): #log.debug("data: %r", data) msg = MSG_TYPES[version][msg_type]() msg.ParseFromString(data) if msg.__class__ is ExceptionResponse: if 'ShardDown' in msg.message: raise UUSSShardDownException("The shard for this userstate is down: %r\n%s" % (msg.message, msg.traceback)) else: raise UUSSProtocolException("Exception received from UUSS server: %r\n%s" % (msg.message, msg.traceback)) return msg @staticmethod def _parse_message(data): (version, remaining_data) = UUSSProtocol._parse_version_header(data) (msg_type, msg_len, msg_data) = UUSSProtocol._parse_message_header(remaining_data, version) return (version, UUSSProtocol._parse_message_body(msg_type, msg_data, version)) def _recv_version_header(self): data = 
self.socket.recv_bytes(VERSION_HEADER_LENGTH) return self._parse_version_header(data)[0] def _recv_message_header(self, version): data = self.socket.recv_bytes(HEADER_LENGTH[version]) return self._parse_message_header(data, version)[:-1] def _recv_message_body(self, msg_type, msg_len): # XXX(jpatrin): why does this not parse like the above 2? return self.socket.recv_bytes(msg_len) @staticmethod def _encode_message(msg, version=VERSION): data = msg.SerializeToString() msg_type = MSG_TYPES_LOOKUP[version][msg.__class__] header = struct.pack(VERSION_HEADER_FORMAT, version) + struct.pack(HEADER_FORMAT[version], msg_type, len(data)) return (header, data) ## public interface def send_message(self, msg, version=VERSION): self.socket.send_bytes(''.join(self._encode_message(msg, version))) def recv_expected_message_class(self, expected_msg_class): (version, msg) = self.recv_message() if msg.__class__ is not expected_msg_class: raise UUSSProtocolException("Message class %r expected, got %r instead: %r" % (expected_msg_class, msg.__class__, msg)) return (version, msg) def recv_expected_message(self, expected_msg_class, expected_game, expected_user_id): (version, msg) = self.recv_expected_message_class(expected_msg_class) if msg.game != expected_game: raise UUSSProtocolException("Game %r expected, got %r instead" % (expected_game, msg.game)) if msg.user_id != expected_user_id: raise UUSSProtocolException("User ID %r expected, got %r instead" % (expected_user_id, msg.user_id)) return (version, msg) def recv_message(self): version = self._recv_version_header() (msg_type, msg_len) = self._recv_message_header(version) log.debug("msg_type: %r", msg_type) log.debug("msg_len: %r", msg_len) data = self._recv_message_body(msg_type, msg_len) return (version, self._parse_message_body(msg_type, data, version))<|fim▁end|>
<|file_name|>email-input.js<|end_file_name|><|fim▁begin|>"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = undefined; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = require("react"); var _react2 = _interopRequireDefault(_react); var _propTypes = require("prop-types"); var _propTypes2 = _interopRequireDefault(_propTypes); var _textInput = require("./text-input"); var _textInput2 = _interopRequireDefault(_textInput); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } function validateEmail(email) { var re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; return re.test(email); } var EmailInput = function (_TextInput) { _inherits(EmailInput, _TextInput); function EmailInput(props) { _classCallCheck(this, EmailInput); return _possibleConstructorReturn(this, (EmailInput.__proto__ || Object.getPrototypeOf(EmailInput)).call(this, props)); } _createClass(EmailInput, [{ key: "onValid", value: function onValid(e) { if (!!this.props.onValid) { this.props.onValid(validateEmail(e.target.value), e); } } }]); return EmailInput; }(_textInput2.default); exports.default = EmailInput; EmailInput.propTypes = { type: _propTypes2.default.string.isRequired<|fim▁hole|> type: "email" };<|fim▁end|>
}; EmailInput.defaultProps = {
<|file_name|>test.js<|end_file_name|><|fim▁begin|>/** * author: Shawn * time : 2017/8/15 17:19<|fim▁hole|> var testJar = require('./testJar'); for (let i = 0; i < 10; i++) { testJar.tt(); } // testJar.tt(); // testJar.tt(); // testJar.tt();<|fim▁end|>
* desc : */
<|file_name|>test_docs.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Money doctests as unittest Suite """ # RADAR: Python2 from __future__ import absolute_import import doctest import unittest # RADAR: Python2 import money.six FILES = ( '../../README.rst',<|fim▁hole|> def load_tests(loader, tests, pattern): # RADAR Python 2.x if money.six.PY2: # Doc tests are Python 3.x return unittest.TestSuite() return doctest.DocFileSuite(*FILES)<|fim▁end|>
)
<|file_name|>instrument-mappings.test.js<|end_file_name|><|fim▁begin|>/* eslint-disable no-undef,no-unused-expressions */ const request = require('supertest') const expect = require('chai').expect const app = require('../../bin/www') const fixtures = require('../data/fixtures') describe('/api/mappings', () => { beforeEach(() => { this.Sample = require('../../models').Sample this.Instrument = require('../../models').Instrument this.InstrumentMapping = require('../../models').InstrumentMapping this.ValidationError = require('../../models').sequelize.ValidationError expect(this.Sample).to.exist expect(this.Instrument).to.exist expect(this.InstrumentMapping).to.exist expect(this.ValidationError).to.exist return require('../../models').sequelize .sync({force: true, logging: false}) .then(() => { console.log('db synced') return this.Sample .bulkCreate(fixtures.samples) }) .then(samples => { this.samples = samples return this.Instrument .bulkCreate(fixtures.instruments) }) .then(instruments => { return this.InstrumentMapping.bulkCreate(fixtures.instrumentMappings) }) .then(() => console.log('Fixtures loaded')) }) it('should return 200 on GET /api/instruments/:instrumentId/mappings', () => { return request(app) .get('/api/instruments/a35c6ac4-53f7-49b7-82e3-7a0aba5c2c45/mappings') .set('Accept', 'application/json') .expect('Content-Type', /json/) .expect(200) .then((res) => { expect(res.body, 'body should be an array').to.be.an('array') expect(res.body, 'body should contain 2 items').to.have.lengthOf(2) expect(res.body[0], 'item 0 should be an object').to.be.an('object') }) }) it('should return 201 on POST /api/instruments/:instrumentId/mappings', () => { return request(app) .post('/api/instruments/a35c6ac4-53f7-49b7-82e3-7a0aba5c2c45/mappings') .send({ lowerRank: 55, upperRank: 56, referenceRank: 55, sampleId: '636f247a-dc88-4b52-b8e8-78448b5e5790' }) .set('Accept', 'application/json') .expect('Content-Type', /json/) .expect(201) .then(res => { expect(res.body, 'body 
should be an object') expect(res.body.lowerRank, 'lowerRank should equal 55').to.equal(55) expect(res.body.upperRank, 'upperRank should equal 56').to.equal(56) expect(res.body.referenceRank, 'referenceRank should equal 55').to.equal(55) expect(res.body.sampleId).to.equal('636f247a-dc88-4b52-b8e8-78448b5e5790', 'sampleId should equal 636f247a-dc88-4b52-b8e8-78448b5e5790') expect(res.body.instrumentId).to.equal('a35c6ac4-53f7-49b7-82e3-7a0aba5c2c45', 'instrumentId should equal a35c6ac4-53f7-49b7-82e3-7a0aba5c2c45') }) }) it('should return 200 GET /api/mappings/:id', () => { return request(app) .get('/api/mappings/1bcab515-ed82-4449-aec9-16a6142b0d15') .set('Accept', 'application/json') .expect('Content-Type', /json/) .expect(200) .then((res) => { expect(res.body, 'body should be an object').to.be.an('object') expect(res.body.id, 'id should equal 1bcab515-ed82-4449-aec9-16a6142b0d15').to.equal('1bcab515-ed82-4449-aec9-16a6142b0d15') }) }) it('should return 200 on PUT /api/mappings/:id', () => { return request(app) .put('/api/mappings/712fda5f-3ff5-4e23-8949-320a96e0d565') .send({ lowerRank: 45, upperRank: 46, referenceRank: 45, sampleId: '0f1ed577-955a-494d-868c-cf4dc5c3c892' }) .set('Accept', 'application/json') .expect('Content-Type', /json/) .expect(200) .then(res => { expect(res.body.lowerRank, 'lowerRank should equal 45').to.equal(45) expect(res.body.upperRank, 'upperRank should equal 46').to.equal(46) expect(res.body.referenceRank, 'referenceRank should equal 45').to.equal(45) expect(res.body.sampleId).to.equal('0f1ed577-955a-494d-868c-cf4dc5c3c892', 'sampleId should equal 0f1ed577-955a-494d-868c-cf4dc5c3c892') }) }) it('should return 404 on PUT /api/mappings/:id when id is unknown', () => { return request(app) .put('/api/mappings/bb459a9e-0d2c-4da1-b538-88ea43d30f8c') .send({ sampleId: '0f1ed577-955a-494d-868c-cf4dc5c3c892' }) .set('Accept', 'application/json') .expect('Content-Type', /json/) .expect(404) .then((res) => { expect(res.body, 'body should be a 
object').to.be.an('object') expect(res.body).to.include({ msg: 'Failed to retrieve instrument mapping n°bb459a9e-0d2c-4da1-b538-88ea43d30f8c',<|fim▁hole|> }) }) it('should return 204 on DELETE /api/mappings/:id', () => { return request(app) .delete('/api/mappings/712fda5f-3ff5-4e23-8949-320a96e0d565') .expect(204) .then((res) => { expect(res.body, 'body should be empty').to.be.empty }) }) it('should return 404 on DELETE /api/mappings/:id when id is unknown', () => { return request(app) .delete('/api/mappings/bb459a9e-0d2c-4da1-b538-88ea43d30f8c') .set('Accept', 'application/json') .expect('Content-Type', /json/) .expect(404) .then((res) => { expect(res.body, 'body should be a object').to.be.an('object') expect(res.body).to.include({ msg: 'Failed to retrieve instrument mapping n°bb459a9e-0d2c-4da1-b538-88ea43d30f8c', name: 'DatabaseError' }) }) }) })<|fim▁end|>
name: 'DatabaseError' })
<|file_name|>action_batcher.rs<|end_file_name|><|fim▁begin|>// OpenAOE: An open source reimplementation of Age of Empires (1997) // Copyright (c) 2016 Kevin Fuller // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. //<|fim▁hole|>// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. use action::Action; use specs::Index; use std::collections::HashMap; use std::mem; pub struct ActionBatcher { actions: HashMap<Index, Vec<Action>>, } impl ActionBatcher { pub fn new() -> ActionBatcher { ActionBatcher { actions: HashMap::new() } } pub fn queue_for_entity(&mut self, entity_id: Index, action: Action) { if !self.actions.contains_key(&entity_id) { self.actions.insert(entity_id, Vec::new()); } self.actions.get_mut(&entity_id).unwrap().push(action); } pub fn consume_actions(&mut self) -> HashMap<Index, Vec<Action>> { let mut consumed = HashMap::new(); mem::swap(&mut consumed, &mut self.actions); consumed } }<|fim▁end|>
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
<|file_name|>motion_metrics_ops_test.py<|end_file_name|><|fim▁begin|># Copyright 2021 The Waymo Open Dataset Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== from __future__ import absolute_import from __future__ import division from __future__ import print_function import copy import numpy as np import tensorflow as tf from google.protobuf import text_format from waymo_open_dataset.metrics.ops import py_metrics_ops from waymo_open_dataset.protos import motion_metrics_pb2 class MotionMetricsOpsTest(tf.test.TestCase): """Unit tests for motion metrics.""" def _BuildConfig(self, additional_config_str=''): """Builds a metrics config.""" config = motion_metrics_pb2.MotionMetricsConfig() config_text = """ track_steps_per_second: 10 prediction_steps_per_second: 10 track_history_samples: 0 track_future_samples: 4 step_configurations { measurement_step: 3 lateral_miss_threshold: 1.0 longitudinal_miss_threshold: 2.0 } max_predictions: 6 speed_scale_lower: 1.0 speed_scale_upper: 1.0 speed_lower_bound: 1.4 speed_upper_bound: 11.0 """ + additional_config_str text_format.Parse(config_text, config) return config def _CreateTestScenario(self): gt_scenario_id = ['test'] gt_object_id = [[1, 2]] gt_object_type = [[1, 1]] gt_is_valid = np.ones([1, 2, 5], dtype=np.bool) gt_trajectory = np.reshape([[[2, 2, 1, 1, 0.78539816, 20.0, 20.0], [4, 4, 1, 1, 0.78539816, 20.0, 20.0], [6, 6, 1, 1, 
0.78539816, 20.0, 20.0], [8, 8, 1, 1, 0.78539816, 20.0, 20.0], [10, 10, 1, 1, 0.78539816, 20.0, 20.0]], [[-1, 0, 1, 1, 3.14159, -10.0, 0.0], [-2, 0, 1, 1, 3.14159, -10.0, 0.0], [-3, 0, 1, 1, 3.14159, -10.0, 0.0], [-4, 0, 1, 1, 3.14159, -10.0, 0.0], [-5, 0, 1, 1, 3.14159, -10.0, 0.0]]], [1, 2, 5, 7]) pred_gt_indices = np.reshape([0, 1], (1, 1, 2)) pred_gt_indices_mask = np.ones((1, 1, 2)) > 0.0 return { 'scenario_id': gt_scenario_id, 'object_id': gt_object_id, 'object_type': gt_object_type, 'gt_is_valid': gt_is_valid, 'gt_trajectory': gt_trajectory, 'pred_gt_indices': pred_gt_indices, 'pred_gt_indices_mask': pred_gt_indices_mask, } def setUp(self): super(MotionMetricsOpsTest, self).setUp() self._config = self._BuildConfig() self._gt = self._CreateTestScenario() def _RunEval(self, pred_score, pred_trajectory, gt=None, config=None): if not gt: gt = self._gt if not config: config = self._config g = tf.Graph() with g.as_default(): (min_ade, min_fde, miss_rate, overlap_rate, mean_ap) = py_metrics_ops.motion_metrics( config=config.SerializeToString(), prediction_trajectory=pred_trajectory, prediction_score=pred_score, ground_truth_trajectory=gt['gt_trajectory'], ground_truth_is_valid=gt['gt_is_valid'], prediction_ground_truth_indices=gt['pred_gt_indices'], prediction_ground_truth_indices_mask=gt['pred_gt_indices_mask'], object_type=gt['object_type'], object_id=gt['object_id'], scenario_id=gt['scenario_id']) with self.test_session(graph=g) as sess: return sess.run([min_ade, min_fde, miss_rate, overlap_rate, mean_ap]) def testComputeMissRateNoMisses(self): pred_score = np.reshape([0.5], (1, 1, 1)) pred_trajectory = np.reshape([[[4, 4], [6, 6], [8, 8], [10, 10]], [[-2, 0], [-3, 0], [-4, 0], [-5, 0]]], (1, 1, 1, 2, 4, 2)) val = self._RunEval(pred_score, pred_trajectory) # miss_rate of Vehicle. self.assertEqual(val[2][0], 0.0) # mean_ap of Vehicle. 
self.assertEqual(val[4][0], 1.0) def testComputeMissRateNoMisses2(self): pred_score = np.reshape([0.5], (1, 1, 1)) pred_trajectory = np.reshape([[[-2, 0], [-3, 0], [-4, 0], [-5, 0]], [[4, 4], [6, 6], [8, 8], [10, 10]]], (1, 1, 1, 2, 4, 2)) gt = copy.deepcopy(self._gt) gt['pred_gt_indices'] = np.reshape([1, 0], (1, 1, 2)) val = self._RunEval(pred_score, pred_trajectory, gt=gt) # miss_rate of Vehicle. self.assertEqual(val[2][0], 0.0) # mean_ap of Vehicle. self.assertEqual(val[4][0], 1.0) def testComputeMissRateLateral_2(self): pred_score = np.reshape([0.5], (1, 1, 1)) pred_trajectory = np.reshape( [[[4, 4], [6, 6], [8, 8], [10, 10]], [[-2, 1.01], [-3, 1.01], [-4, 1.01], [-5, 1.01]]], (1, 1, 1, 2, 4, 2)) val = self._RunEval(pred_score, pred_trajectory) # miss_rate of Vehicle. self.assertEqual(val[2][0], 1.0) # mean_ap of Vehicle. self.assertEqual(val[4][0], 0.0) def testComputeMissRateLateral_1(self): pred_score = np.reshape([0.5], (1, 1, 1)) pred_trajectory = np.reshape([[[4, 4], [6, 6], [8, 8], [9.292, 10.708]], [[-2, 0], [-3, 0], [-4, 0], [-5, 0]]], (1, 1, 1, 2, 4, 2)) val = self._RunEval(pred_score, pred_trajectory) # miss_rate of Vehicle. self.assertEqual(val[2][0], 1.0) # mean_ap of Vehicle. self.assertEqual(val[4][0], 0.0) def testComputeMissRateLongitudinal_2(self): pred_score = np.reshape([0.5], (1, 1, 1)) pred_trajectory = np.reshape([[[4, 4], [6, 6], [8, 8], [10, 10]], [[-2, 0], [-3, 0], [-4, 0], [-7.01, 0]]], (1, 1, 1, 2, 4, 2))<|fim▁hole|> # miss_rate of Vehicle. self.assertEqual(val[2][0], 1.0) # mean_ap of Vehicle. self.assertEqual(val[4][0], 0.0) def testComputeMissRateLongitudinal_1(self): pred_score = np.reshape([0.5], (1, 1, 1)) pred_trajectory = np.reshape([[[4, 4], [6, 6], [8, 8], [11.415, 11.415]], [[-2, 0], [-3, 0], [-4, 0], [-5, 0]]], (1, 1, 1, 2, 4, 2)) val = self._RunEval(pred_score, pred_trajectory) # miss_rate of Vehicle. self.assertEqual(val[2][0], 1.0) # mean_ap of Vehicle. 
self.assertEqual(val[4][0], 0.0) def testComputeNoMissLongitudinal_1(self): pred_score = np.reshape([0.5], (1, 1, 1)) pred_trajectory = np.reshape([[[4, 4], [6, 6], [8, 8], [11.414, 11.414]], [[-2, 0], [-3, 0], [-4, 0], [-5, 0]]], (1, 1, 1, 2, 4, 2)) val = self._RunEval(pred_score, pred_trajectory) # miss_rate of Vehicle. self.assertEqual(val[2][0], 0.0) # mean_ap of Vehicle. self.assertEqual(val[4][0], 1.0) def testComputeVelocityScalingLatitudinal(self): pred_score = np.reshape([0.5], (1, 1, 1)) pred_trajectory = np.reshape([[[4, 4], [6, 6], [8, 8], [10, 10]], [[-2, 0], [-3, 0], [-4, 0], [-5, 0.75]]], (1, 1, 1, 2, 4, 2)) config = motion_metrics_pb2.MotionMetricsConfig() config.CopyFrom(self._config) config.speed_scale_lower = 0.5 config.speed_scale_upper = 1.0 config.speed_lower_bound = 1.0 config.speed_upper_bound = 3.0 val = self._RunEval(pred_score, pred_trajectory, config=config) # miss_rate of Vehicle. self.assertEqual(val[2][0], 0.0) # mean_ap of Vehicle. self.assertEqual(val[4][0], 1.0) # Decrease the velocity below the speed lower bound. gt = copy.deepcopy(self._gt) gt['gt_trajectory'][0, 1, :, 5:7] = 0.0 val = self._RunEval(pred_score, pred_trajectory, config=config, gt=gt) # miss_rate of Vehicle. self.assertEqual(val[2][0], 1.0) # Set the velocity to just below the speed required for object2 to fit. gt = copy.deepcopy(self._gt) gt['gt_trajectory'][0, 1, :, 5] = 1.999 val = self._RunEval(pred_score, pred_trajectory, config=config, gt=gt) # miss_rate of Vehicle. self.assertEqual(val[2][0], 1.0) # Set the velocity to just above the speed required for object2 to fit. gt = copy.deepcopy(self._gt) gt['gt_trajectory'][0, 1, :, 5] = 2.001 val = self._RunEval(pred_score, pred_trajectory, config=config, gt=gt) # miss_rate of Vehicle. 
self.assertEqual(val[2][0], 0.0) def testComputeVelocityScalingLongitudinal(self): pred_score = np.reshape([0.5], (1, 1, 1)) pred_trajectory = np.reshape([[[4, 4], [6, 6], [8, 8], [10, 10]], [[-2, 0], [-3, 0], [-4, 0], [-6.5, 0]]], (1, 1, 1, 2, 4, 2)) config = motion_metrics_pb2.MotionMetricsConfig() config.CopyFrom(self._config) config.speed_scale_lower = 0.5 config.speed_scale_upper = 1.0 config.speed_lower_bound = 1.0 config.speed_upper_bound = 3.0 val = self._RunEval(pred_score, pred_trajectory, config=config) # miss_rate of Vehicle. self.assertEqual(val[2][0], 0.0) # mean_ap of Vehicle. self.assertEqual(val[4][0], 1.0) # Decrease the velocity below the speed lower bound. gt = copy.deepcopy(self._gt) gt['gt_trajectory'][0, 1, :, 5:7] = 0.0 val = self._RunEval(pred_score, pred_trajectory, config=config, gt=gt) # miss_rate of Vehicle. self.assertEqual(val[2][0], 1.0) # Set the velocity to just below the speed required for object2 to fit. gt = copy.deepcopy(self._gt) gt['gt_trajectory'][0, 1, :, 5] = 1.999 val = self._RunEval(pred_score, pred_trajectory, config=config, gt=gt) # miss_rate of Vehicle. self.assertEqual(val[2][0], 1.0) # Set the velocity to just above the speed required for object2 to fit. gt = copy.deepcopy(self._gt) gt['gt_trajectory'][0, 1, :, 5] = 2.001 val = self._RunEval(pred_score, pred_trajectory, config=config, gt=gt) # miss_rate of Vehicle. self.assertEqual(val[2][0], 0.0) def testComputeNoMissLateral_2(self): pred_score = np.reshape([0.8, 0.5], (1, 1, 2)) pred_trajectory = np.reshape([[[[4, 4], [6, 6], [8, 8], [9.294, 10.706]], [[-2, 0], [-3, 0], [-4, 0], [-5, 0]]], [[[4, 4], [6, 6], [8, 8], [10, 10]], [[-2, 0], [-3, 0], [-4, 0], [-5, 0]]]], (1, 1, 2, 2, 4, 2)) val = self._RunEval(pred_score, pred_trajectory) # miss_rate of Vehicle. self.assertEqual(val[2][0], 0.0) # mean_ap of Vehicle. 
self.assertEqual(val[4][0], 1.0) def testTwoJointPredictionsNoMiss(self): pred_score = np.reshape([0.8, 0.5], (1, 1, 2)) pred_trajectory = np.reshape([[[[4, 4], [6, 6], [8, 8], [10, 10]], [[-2, 0], [-3, 0], [-4, 0], [-7.01, 0]]], [[[4, 4], [6, 6], [8, 8], [10, 10]], [[-2, 0], [-3, 0], [-4, 0], [-5, 0]]]], (1, 1, 2, 2, 4, 2)) val = self._RunEval(pred_score, pred_trajectory) # miss_rate of Vehicle. self.assertEqual(val[2][0], 0.0) # mean_ap of Vehicle. self.assertEqual(val[4][0], 0.5) def testTwoJointPredictionsMiss(self): pred_score = np.reshape([0.8, 0.5], (1, 1, 2)) pred_trajectory = np.reshape([[[[4, 4], [6, 6], [8, 8], [10, 10]], [[-2, 0], [-3, 0], [-4, 0], [-7.01, 0]]], [[[4, 4], [6, 6], [8, 8], [14, 14]], [[-2, 0], [-3, 0], [-4, 0], [-5, 0]]]], (1, 1, 2, 2, 4, 2)) val = self._RunEval(pred_score, pred_trajectory) # miss_rate of Vehicle. self.assertEqual(val[2][0], 1.0) # mean_ap of Vehicle. self.assertEqual(val[4][0], 0.0) def testComputeMinADE(self): pred_score = np.reshape([0.5, 0.5], (1, 1, 2)) pred_trajectory = np.reshape( [[[[4, 0], [6, 0], [8, 0], [10, 0]], [[0, 2], [0, 3], [0, 4], [0, 5]]], [[[14, 0], [16, 0], [18, 0], [20, 0]], [[0, 22], [0, 23], [0, 24], [0, 25]]]], (1, 1, 2, 2, 4, 2)) val = self._RunEval(pred_score, pred_trajectory) # 5 metrics. self.assertEqual(len(val), 5) # 3 steps. self.assertEqual(len(val[0]), 3) # ADE of Vehicle. self.assertAlmostEqual(val[0][0], 5.97487, delta=1e-4) # FDE of Vehicle. self.assertAlmostEqual(val[1][0], 8.53553, delta=1e-4) if __name__ == '__main__': tf.compat.v1.disable_eager_execution() tf.test.main()<|fim▁end|>
val = self._RunEval(pred_score, pred_trajectory)
<|file_name|>app-paths.js<|end_file_name|><|fim▁begin|>const fs = require('fs') const { normalize, resolve, join, sep } = require('path') function getAppDir () {<|fim▁hole|> let dir = process.cwd() while (dir.length && dir[dir.length - 1] !== sep) { if (fs.existsSync(join(dir, 'quasar.conf.js'))) { return dir } dir = normalize(join(dir, '..')) } const { fatal } = require('./helpers/logger') fatal(`Error. This command must be executed inside a Quasar v1+ project folder.`) } const appDir = getAppDir() const cliDir = resolve(__dirname, '..') const srcDir = resolve(appDir, 'src') const pwaDir = resolve(appDir, 'src-pwa') const ssrDir = resolve(appDir, 'src-ssr') const cordovaDir = resolve(appDir, 'src-cordova') const capacitorDir = resolve(appDir, 'src-capacitor') const electronDir = resolve(appDir, 'src-electron') const bexDir = resolve(appDir, 'src-bex') module.exports = { cliDir, appDir, srcDir, pwaDir, ssrDir, cordovaDir, capacitorDir, electronDir, bexDir, resolve: { cli: dir => join(cliDir, dir), app: dir => join(appDir, dir), src: dir => join(srcDir, dir), pwa: dir => join(pwaDir, dir), ssr: dir => join(ssrDir, dir), cordova: dir => join(cordovaDir, dir), capacitor: dir => join(capacitorDir, dir), electron: dir => join(electronDir, dir), bex: dir => join(bexDir, dir) } }<|fim▁end|>
<|file_name|>test_bleuscore.py<|end_file_name|><|fim▁begin|># ---------------------------------------------------------------------------- # Copyright 2015-2016 Nervana Systems Inc. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ---------------------------------------------------------------------------- ''' Test BLEUScore metric against reference '''<|fim▁hole|>from neon.transforms.cost import BLEUScore def test_bleuscore(): # dataset with two sentences sentences = ["a quick brown fox jumped", "the rain in spain falls mainly on the plains"] references = [["a fast brown fox jumped", "a quick brown fox vaulted", "a rapid fox of brown color jumped", "the dog is running on the grass"], ["the precipitation in spain falls on the plains", "spanish rain falls for the most part on the plains", "the rain in spain falls in the plains most of the time", "it is raining today"]] # reference scores for the given set of reference sentences bleu_score_references = [92.9, 88.0, 81.5, 67.1] # bleu1, bleu2, bleu3, bleu4 # compute scores bleu_metric = BLEUScore() bleu_metric(sentences, references) # check against references for score, reference in zip(bleu_metric.bleu_n, bleu_score_references): assert round(score, 1) == reference if __name__ == '__main__': test_bleuscore()<|fim▁end|>
<|file_name|>test_schedule_jobs.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*- # This file is part of PYBOSSA. # # Copyright (C) 2015 Scifabric LTD. # # PYBOSSA is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. #<|fim▁hole|># GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>. from pybossa.jobs import schedule_job from rq_scheduler import Scheduler import settings_test from redis.sentinel import Sentinel def a_function(): return def another_function(): return a_job = dict(name=a_function, args=[], kwargs={}, interval=1, timeout=180) another_job = dict(name=another_function, args=[], kwargs={}, interval=1, timeout=180) class TestSetupScheduledJobs(object): """Tests for setup function 'schedule_job'""" def setUp(self): sentinel = Sentinel(settings_test.REDIS_SENTINEL) db = getattr(settings_test, 'REDIS_DB', 0) self.connection = sentinel.master_for('mymaster', db=db) self.connection.flushall() self.scheduler = Scheduler('test_queue', connection=self.connection) def test_adds_scheduled_job_with_interval(self): a_job['interval'] = 7 schedule_job(a_job, self.scheduler) sched_jobs = self.scheduler.get_jobs() assert len(sched_jobs) == 1, sched_jobs assert sched_jobs[0].meta['interval'] == 7 , sched_jobs[0].meta a_job['interval'] = 1 def test_adds_several_jobs_(self): schedule_job(a_job, self.scheduler) schedule_job(another_job, self.scheduler) sched_jobs = self.scheduler.get_jobs() job_func_names = [job.func_name for job in sched_jobs] module_name = 'test_jobs.test_schedule_jobs' assert len(sched_jobs) == 2, sched_jobs assert module_name + '.a_function' in job_func_names, job_func_names assert module_name + '.another_function' in 
job_func_names, job_func_names def test_does_not_add_job_if_already_added(self): schedule_job(a_job, self.scheduler) schedule_job(a_job, self.scheduler) sched_jobs = self.scheduler.get_jobs() assert len(sched_jobs) == 1, sched_jobs def test_returns_log_messages(self): success_message = schedule_job(a_job, self.scheduler) failure_message = schedule_job(a_job, self.scheduler) assert success_message == 'Scheduled a_function([], {}) to run every 1 seconds' assert failure_message == 'WARNING: Job a_function([], {}) is already scheduled' def test_failed_attempt_to_schedule_does_not_polute_redis(self): schedule_job(a_job, self.scheduler) schedule_job(a_job, self.scheduler) stored_values = self.connection.keys('rq:job*') assert len(stored_values) == 1, len(stored_values)<|fim▁end|>
# PYBOSSA is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
<|file_name|>interface_sg.py<|end_file_name|><|fim▁begin|>import os from src.core import prep from sgprocessor import * def ProcessSg(p, opts): if opts.anno == True: if 'BEDDB' not in os.environ: p.error('$BEDDB Not Exist. See README') str_path_sgfq = opts.sg str_nm = os.path.basename(os.path.splitext(opts.sg)[0]) str_proj = 'aux' str_path_proj = os.path.join(opts.tdir, str_proj) if not os.path.exists(str_path_proj): os.makedirs(str_path_proj) str_path_sgpsam = os.path.join(str_path_proj, str_nm + '.sgpsam') str_path_sgsam = os.path.join(str_path_proj, str_nm + '.sgsam') str_path_sg = os.path.join(opts.tdir, str_nm + '.sg') print('Mapping sgRNA seq to ref genome with Bwa...') prep.CallBWA(str_path_sgfq, '', opts.ref, str_path_sgpsam, False, opts.thrd) prep.FilterSam(str_path_sgpsam, str_path_sgsam, False) print('Done') print('Processing sgsam...') OrganizeSgsam(str_path_sgsam, str_path_sg) print('Done') if opts.anno == True: str_path_sgbed = os.path.join(str_path_proj, str_nm + '.sgbed') str_path_sgmap = os.path.join(str_path_proj, str_nm + '.sgmap') str_path_sga = os.path.join(opts.tdir, str_nm + '.sga') print('Annotating sgRNA...')<|fim▁hole|> int_status = AnnotateSg(str_path_sgsam, opts.ref, str_path_sgbed, str_path_sgmap) if int_status == 1: print('Annotated with RefSeq') elif int_status ==2: print('Annotated with RefSeq and UCSC Gene') elif int_status ==3: print('Annotated with RefSeq, UCSC Gene and GENCODE') elif int_status == 4: print('Annotated with RefSeq and UCSC Gene') print('Warning: Some are marked with None') elif int_status == 5: print('Annotated with RefSeq, UCSC Gene and GENCODE') print('Warning: Some are marked with None') print('Done') print('Merging sg and sgmap...') MergeSg(str_path_sg, str_path_sgmap, str_path_sga) print('Done')<|fim▁end|>
<|file_name|>error_handler_test.js<|end_file_name|><|fim▁begin|>process.env.NODE_ENV = 'test'; var chai = require('chai'); var chaihttp = require('chai-http'); chai.use(chaihttp); var expect = chai.expect; require(__dirname + '/../app.js'); describe('the error handler function', function() { it('should return a status of 500', function(done) { chai.request('localhost:3000') .get('/products/fish') .end(function(err, res) { expect(res).to.have.status(500); expect(JSON.stringify(res.body)).to.eql('{"msg":"ERROR!!"}'); done(); });<|fim▁hole|> }); });<|fim▁end|>
<|file_name|>FileNameUtil.java<|end_file_name|><|fim▁begin|>/* * Copyright 2006-2016 The MZmine 3 Development Team * * This file is part of MZmine 3. * * MZmine 3 is free software; you can redistribute it and/or modify it under the terms of the GNU * General Public License as published by the Free Software Foundation; either version 2 of the * License, or (at your option) any later version. * * MZmine 3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License along with MZmine 3; if not, * write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301<|fim▁hole|> * USA */ package io.github.mzmine.util; import java.io.File; import java.util.List; import javax.annotation.Nonnull; import com.google.common.io.Files; /** * File name utilities */ public class FileNameUtil { public static @Nonnull String findCommonPrefix(@Nonnull List<File> fileNames) { if (fileNames.size() < 2) return ""; String firstName = fileNames.get(0).getName(); for (int prefixLen = 0; prefixLen < firstName.length(); prefixLen++) { char c = firstName.charAt(prefixLen); for (int i = 1; i < fileNames.size(); i++) { String ithName = fileNames.get(i).getName(); if (prefixLen >= ithName.length() || ithName.charAt(prefixLen) != c) { // Mismatch found return ithName.substring(0, prefixLen); } } } return firstName; } public static @Nonnull String findCommonSuffix(@Nonnull List<File> fileNames) { if (fileNames.isEmpty()) return ""; if (fileNames.size() == 1) { // Return file extension String ext = Files.getFileExtension(fileNames.get(0).getAbsolutePath()); return "." 
+ ext; } String firstName = fileNames.get(0).getName(); for (int suffixLen = 0; suffixLen < firstName.length(); suffixLen++) { char c = firstName.charAt(firstName.length() - 1 - suffixLen); for (int i = 1; i < fileNames.size(); i++) { String ithName = fileNames.get(i).getName(); if (suffixLen >= ithName.length() || ithName.charAt(ithName.length() - 1 - suffixLen) != c) { // Mismatch found return ithName.substring(ithName.length() - suffixLen); } } } return firstName; } }<|fim▁end|>
<|file_name|>SurvivalAlertWin.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python #PYTHON 3 only # Sample Windows Startup check -- Mail alert # SurvivalAlert v1.0 # By thxer.com # N-pn.fr Community and Hexpresso CTF team import os import socket import ctypes import smtplib #Global Variables HOSTNAME = str(socket.gethostname()) IPLAN = str(socket.gethostbyname(socket.gethostname())) AUTHORIZE_USER = ['Users','Utilisateur'] #User wich are allow to use computers LIMIT_FREE_HDD_SPACE = 11 # Limit of free HDD space alert in GB #Email Settings TO = "[email protected]" # User who recept mail alert USER = "[email protected]" PWD = "smtp_passwd" SMTPSERV = "smtp.server_addres.com" <|fim▁hole|>#Check HDD Status def check_hdd(): """Check HDD disk with windows tools """ Hdd_status = os.popen("wmic diskdrive get status").read() for word in Hdd_status.split(): if not word in ["Status", "OK"]: ctypes.windll.user32.MessageBoxW(None, u"ALERT: HDD ERROR", u"ERROR CONTACT ADMIN NOW !", 0) send_mail("Warning HDD not SAFE !","Windows claims About unsafe HDD !") return Hdd_status def get_free_space(): """ Test first Drive Free space then alert < LIMIT_HDD_FREE_SPACE """ free_space = round(int(os.popen("wmic logicaldisk get freespace").read().split()[1])/1024/1024/1024) if free_space < LIMIT_FREE_HDD_SPACE : ctypes.windll.user32.MessageBoxW(None, u"ALERT: HDD FREE SPACE ERROR", u"ERROR CONTACT ADMIN NOW !", 0) msg = "Warning Free space is : " + str(free_space) + "GB" send_mail("Warning C: Free SPACE !",msg) return free_space def whois_log(): """ Get user Login name and alert if not in AUTHORIZE_USER list """ if not os.getlogin() in AUTHORIZE_USER : msg = "SUSPECT Login IN : " + os.getlogin() send_mail("SUSPECT LOGIN",msg) def send_mail(subject,message): subject = str(subject) message = str(message) server = smtplib.SMTP(SMTPSERV,25) # 587 for STARTLS server.ehlo() #server.starttls() # Un comment for use STARTTLS server.login(USER, PWD) header = 'TO:' + TO + '\n' + 'From: ' + USER + 
'\n' + 'Subject:'+ HOSTNAME + " | " + IPLAN + " " + subject +'\n' mail = header + '\n' + "PC : " + HOSTNAME + " IP LAN : " + IPLAN + "\n" + message + '\n\n' server.sendmail(USER, TO, mail ) server.close() if __name__ == '__main__': # Uncomment for test mail configuration #send_mail("Send a Test Mail","1337 Are In place N-pn") whois_log() get_free_space() check_hdd()<|fim▁end|>
<|file_name|>login_view.js<|end_file_name|><|fim▁begin|>/** A modal view for handling user logins @class LoginView @extends Discourse.ModalBodyView @namespace Discourse @module Discourse **/ Discourse.LoginView = Discourse.ModalBodyView.extend({ templateName: 'modal/login', siteBinding: 'Discourse.site', title: Em.String.i18n('login.title'), authenticate: null, loggingIn: false, showView: function(view) { return this.get('controller').show(view); }, newAccount: function() { return this.showView(Discourse.CreateAccountView.create()); }, forgotPassword: function() { return this.showView(Discourse.ForgotPasswordView.create()); }, loginButtonText: (function() {<|fim▁hole|> }).property('loggingIn'), loginDisabled: (function() { if (this.get('loggingIn')) { return true; } if (this.blank('loginName') || this.blank('loginPassword')) { return true; } return false; }).property('loginName', 'loginPassword', 'loggingIn'), login: function() { var _this = this; this.set('loggingIn', true); $.post("/session", { login: this.get('loginName'), password: this.get('loginPassword') }).success(function(result) { if (result.error) { _this.set('loggingIn', false); if( result.reason === 'not_activated' ) { return _this.showView(Discourse.NotActivatedView.create({username: _this.get('loginName'), sentTo: result.sent_to_email, currentEmail: result.current_email})); } _this.flash(result.error, 'error'); } else { return window.location.reload(); } }).fail(function(result) { _this.flash(Em.String.i18n('login.error'), 'error'); return _this.set('loggingIn', false); }); return false; }, authMessage: (function() { if (this.blank('authenticate')) { return ""; } return Em.String.i18n("login." 
+ (this.get('authenticate')) + ".message"); }).property('authenticate'), twitterLogin: function() { var left, top; this.set('authenticate', 'twitter'); left = this.get('lastX') - 400; top = this.get('lastY') - 200; return window.open("/auth/twitter", "_blank", "menubar=no,status=no,height=400,width=800,left=" + left + ",top=" + top); }, facebookLogin: function() { var left, top; this.set('authenticate', 'facebook'); left = this.get('lastX') - 400; top = this.get('lastY') - 200; return window.open("/auth/facebook", "_blank", "menubar=no,status=no,height=400,width=800,left=" + left + ",top=" + top); }, openidLogin: function(provider) { var left, top; left = this.get('lastX') - 400; top = this.get('lastY') - 200; if (provider === "yahoo") { this.set("authenticate", 'yahoo'); return window.open("/auth/yahoo", "_blank", "menubar=no,status=no,height=400,width=800,left=" + left + ",top=" + top); } else { window.open("/auth/google", "_blank", "menubar=no,status=no,height=500,width=850,left=" + left + ",top=" + top); return this.set("authenticate", 'google'); } }, githubLogin: function() { var left, top; this.set('authenticate', 'github'); left = this.get('lastX') - 400; top = this.get('lastY') - 200; return window.open("/auth/github", "_blank", "menubar=no,status=no,height=400,width=800,left=" + left + ",top=" + top); }, personaLogin: function() { navigator.id.request(); }, authenticationComplete: function(options) { if (options.awaiting_approval) { this.flash(Em.String.i18n('login.awaiting_approval'), 'success'); this.set('authenticate', null); return; } if (options.awaiting_activation) { this.flash(Em.String.i18n('login.awaiting_confirmation'), 'success'); this.set('authenticate', null); return; } // Reload the page if we're authenticated if (options.authenticated) { window.location.reload(); return; } return this.showView(Discourse.CreateAccountView.create({ accountEmail: options.email, accountUsername: options.username, accountName: options.name, authOptions: 
Em.Object.create(options) })); }, mouseMove: function(e) { this.set('lastX', e.screenX); return this.set('lastY', e.screenY); }, didInsertElement: function(e) { var _this = this; return Em.run.next(function() { return $('#login-account-password').keydown(function(e) { if (e.keyCode === 13) { return _this.login(); } }); }); } });<|fim▁end|>
if (this.get('loggingIn')) { return Em.String.i18n('login.logging_in'); } return Em.String.i18n('login.title');
<|file_name|>R$primitive$attr$assign.java<|end_file_name|><|fim▁begin|>package org.renjin.primitives; import org.renjin.eval.Context; import org.renjin.eval.EvalException; import org.renjin.primitives.annotations.processor.ArgumentException; import org.renjin.primitives.annotations.processor.ArgumentIterator; import org.renjin.primitives.annotations.processor.WrapperRuntime; import org.renjin.sexp.BuiltinFunction; import org.renjin.sexp.Environment; import org.renjin.sexp.FunctionCall; import org.renjin.sexp.PairList; import org.renjin.sexp.SEXP; import org.renjin.sexp.StringVector; import org.renjin.sexp.Vector; public class R$primitive$attr$assign<|fim▁hole|>{ public R$primitive$attr$assign() { super("attr<-"); } public SEXP apply(Context context, Environment environment, FunctionCall call, PairList args) { try { ArgumentIterator argIt = new ArgumentIterator(context, environment, args); SEXP s0 = argIt.evalNext(); SEXP s1 = argIt.evalNext(); SEXP s2 = argIt.evalNext(); if (!argIt.hasNext()) { return this.doApply(context, environment, s0, s1, s2); } throw new EvalException("attr<-: too many arguments, expected at most 3."); } catch (ArgumentException e) { throw new EvalException(context, "Invalid argument: %s. 
Expected:\n\tattr<-(any, character(1), any)", e.getMessage()); } catch (EvalException e) { e.initContext(context); throw e; } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new EvalException(e); } } public static SEXP doApply(Context context, Environment environment, FunctionCall call, String[] argNames, SEXP[] args) { try { if ((args.length) == 3) { return doApply(context, environment, args[ 0 ], args[ 1 ], args[ 2 ]); } } catch (EvalException e) { e.initContext(context); throw e; } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new EvalException(e); } throw new EvalException("attr<-: max arity is 3"); } public SEXP apply(Context context, Environment environment, FunctionCall call, String[] argNames, SEXP[] args) { return R$primitive$attr$assign.doApply(context, environment, call, argNames, args); } public static SEXP doApply(Context context, Environment environment, SEXP arg0, SEXP arg1, SEXP arg2) throws Exception { if (((arg0 instanceof SEXP)&&((arg1 instanceof Vector)&&StringVector.VECTOR_TYPE.isWiderThanOrEqualTo(((Vector) arg1))))&&(arg2 instanceof SEXP)) { return Attributes.setAttribute(((SEXP) arg0), WrapperRuntime.convertToString(arg1), ((SEXP) arg2)); } else { throw new EvalException(String.format("Invalid argument:\n\tattr<-(%s, %s, %s)\n\tExpected:\n\tattr<-(any, character(1), any)", arg0 .getTypeName(), arg1 .getTypeName(), arg2 .getTypeName())); } } }<|fim▁end|>
extends BuiltinFunction
<|file_name|>jupyterhub_config.py<|end_file_name|><|fim▁begin|># THIS FILE IS CONTROLLED BY ELASTICLUSTER # local modifications will be overwritten # the next time `elasticluster setup` is run! # # # Configuration file for jupyterhub. # #------------------------------------------------------------------------------ # JupyterHub(Application) configuration #------------------------------------------------------------------------------ ## An Application for starting a Multi-User Jupyter Notebook server. ## Grant admin users permission to access single-user servers. # # Users should be properly informed if this is enabled. #c.JupyterHub.admin_access = False ## Class for authenticating users. # c.JupyterHub.authenticator_class = 'jupyterhub.auth.PAMAuthenticator' ## The base URL of the entire application c.JupyterHub.base_url = '/' ## Whether to shutdown the proxy when the Hub shuts down. # #c.JupyterHub.cleanup_proxy = True ## Whether to shutdown single-user servers when the Hub shuts down. # #c.JupyterHub.cleanup_servers = True ## The config file to load c.JupyterHub.config_file = '/etc/jupyterhub/jupyterhub_config.py' ## Number of days for a login cookie to be valid. Default is two weeks. # #c.JupyterHub.cookie_max_age_days = 14 ## The cookie secret to use to encrypt cookies. # # Loaded from the JPY_COOKIE_SECRET env variable by default. c.JupyterHub.cookie_secret = open('/var/lib/jupyterhub/jupyterhub_cookie_secret', 'rb').read().strip() ## File in which to store the cookie secret. c.JupyterHub.cookie_secret_file = 'jupyterhub_cookie_secret' ## The location of jupyterhub data files (e.g. /usr/local/share/jupyter/hub) c.JupyterHub.data_files_path = '/opt/anaconda3/share/jupyter/hub' ## Include any kwargs to pass to the database connection. See # sqlalchemy.create_engine for details. #c.JupyterHub.db_kwargs = {} ## url for the database. e.g. 
`sqlite:///jupyterhub.sqlite` c.JupyterHub.db_url = 'sqlite:////var/lib/jupyterhub/jupyterhub.sqlite' ## show debug output in configurable-http-proxy #c.JupyterHub.debug_proxy = False ## File to write PID Useful for daemonizing jupyterhub. c.JupyterHub.pid_file = '/var/run/jupyterhub.pid' ## The public facing port of the proxy c.JupyterHub.port = 443 ## The Proxy Auth token. # # Loaded from the CONFIGPROXY_AUTH_TOKEN env variable by default. c.JupyterHub.proxy_auth_token = open('/var/lib/jupyterhub/jupyterhub_proxy_auth_token', 'rb').read().strip() ## The command to start the http proxy. # # Only override if configurable-http-proxy is not on your PATH c.JupyterHub.proxy_cmd = ['/usr/local/lib/node_modules/configurable-http-proxy/bin/configurable-http-proxy'] ## Dict of token:servicename to be loaded into the database. # # Allows ahead-of-time generation of API tokens for use by externally managed # services. #c.JupyterHub.service_tokens = {} ## List of service specification dictionaries. # # A service # # For instance:: # # services = [ # { # 'name': 'cull_idle', # 'command': ['/path/to/cull_idle_servers.py'], # }, # { # 'name': 'formgrader', # 'url': 'http://127.0.0.1:1234', # 'token': 'super-secret', # 'environment': # } # ] #c.JupyterHub.services = [] ## The class to use for spawning single-user servers. # c.JupyterHub.spawner_class = 'jupyterhub.spawner.LocalProcessSpawner' ## Path to SSL certificate file for the public facing interface of the proxy # # Use with ssl_key c.JupyterHub.ssl_cert = '/etc/jupyterhub/jupyterhub.crt.pem' ## Path to SSL key file for the public facing interface of the proxy #<|fim▁hole|># Spawner(LoggingConfigurable) configuration #------------------------------------------------------------------------------ ## The command used for starting the single-user server. # # Provide either a string or a list containing the path to the startup script # command. Extra arguments, other than this path, should be provided via `args`. 
# # This is usually set if you want to start the single-user server in a different # python environment (with virtualenv/conda) than JupyterHub itself. # # Some spawners allow shell-style expansion here, allowing you to use # environment variables. Most, including the default, do not. Consult the # documentation for your spawner to verify! c.Spawner.cmd = ['/opt/anaconda3/bin/jupyterhub-singleuser'] ## Minimum number of cpu-cores a single-user notebook server is guaranteed to # have available. # # If this value is set to 0.5, allows use of 50% of one CPU. If this value is # set to 2, allows use of up to 2 CPUs. # # Note that this needs to be supported by your spawner for it to work. #c.Spawner.cpu_guarantee = None ## Maximum number of cpu-cores a single-user notebook server is allowed to use. # # If this value is set to 0.5, allows use of 50% of one CPU. If this value is # set to 2, allows use of up to 2 CPUs. # # The single-user notebook server will never be scheduled by the kernel to use # more cpu-cores than this. There is no guarantee that it can access this many # cpu-cores. # # This needs to be supported by your spawner for it to work. #c.Spawner.cpu_limit = None ## Enable debug-logging of the single-user server #c.Spawner.debug = False ## The URL the single-user server should start in. # # `{username}` will be expanded to the user's username # # Example uses: # - You can set `notebook_dir` to `/` and `default_url` to `/home/{username}` to allow people to # navigate the whole filesystem from their notebook, but still start in their home directory. # - You can set this to `/lab` to have JupyterLab start by default, rather than Jupyter Notebook. #c.Spawner.default_url = '' ## Disable per-user configuration of single-user servers. # # When starting the user's single-user server, any config file found in the # user's $HOME directory will be ignored. 
# # Note: a user could circumvent this if the user modifies their Python # environment, such as when they have their own conda environments / virtualenvs # / containers. #c.Spawner.disable_user_config = False ## Whitelist of environment variables for the single-user server to inherit from # the JupyterHub process. # # This whitelist is used to ensure that sensitive information in the JupyterHub # process's environment (such as `CONFIGPROXY_AUTH_TOKEN`) is not passed to the # single-user server's process. #c.Spawner.env_keep = ['PATH', 'PYTHONPATH', 'CONDA_ROOT', 'CONDA_DEFAULT_ENV', 'VIRTUAL_ENV', 'LANG', 'LC_ALL'] ## Extra environment variables to set for the single-user server's process. # # Environment variables that end up in the single-user server's process come from 3 sources: # - This `environment` configurable # - The JupyterHub process' environment variables that are whitelisted in `env_keep` # - Variables to establish contact between the single-user notebook and the hub (such as JUPYTERHUB_API_TOKEN) # # The `enviornment` configurable should be set by JupyterHub administrators to # add installation specific environment variables. It is a dict where the key is # the name of the environment variable, and the value can be a string or a # callable. If it is a callable, it will be called with one parameter (the # spawner instance), and should return a string fairly quickly (no blocking # operations please!). # # Note that the spawner class' interface is not guaranteed to be exactly same # across upgrades, so if you are using the callable take care to verify it # continues to work after upgrades! #c.Spawner.environment = {} ## Timeout (in seconds) before giving up on a spawned HTTP server # # Once a server has successfully been spawned, this is the amount of time we # wait before assuming that the server is unable to accept connections. #c.Spawner.http_timeout = 30 ## The IP address (or hostname) the single-user server should listen on. 
# # The JupyterHub proxy implementation should be able to send packets to this # interface. #c.Spawner.ip = '127.0.0.1' ## Minimum number of bytes a single-user notebook server is guaranteed to have # available. # # Allows the following suffixes: # - K -> Kilobytes # - M -> Megabytes # - G -> Gigabytes # - T -> Terabytes # # This needs to be supported by your spawner for it to work. #c.Spawner.mem_guarantee = None ## Maximum number of bytes a single-user notebook server is allowed to use. # # Allows the following suffixes: # - K -> Kilobytes # - M -> Megabytes # - G -> Gigabytes # - T -> Terabytes # # If the single user server tries to allocate more memory than this, it will # fail. There is no guarantee that the single-user notebook server will be able # to allocate this much memory - only that it can not allocate more than this. # # This needs to be supported by your spawner for it to work. #c.Spawner.mem_limit = None ## Path to the notebook directory for the single-user server. # # The user sees a file listing of this directory when the notebook interface is # started. The current interface does not easily allow browsing beyond the # subdirectories in this directory's tree. # # `~` will be expanded to the home directory of the user, and {username} will be # replaced with the name of the user. # # Note that this does *not* prevent users from accessing files outside of this # path! They can do so with many other means. c.Spawner.notebook_dir = '~' #------------------------------------------------------------------------------ # Authenticator(LoggingConfigurable) configuration #------------------------------------------------------------------------------ ## Base class for implementing an authentication provider for JupyterHub ## Set of users that will have admin rights on this JupyterHub. 
# # Admin users have extra privilages: # - Use the admin panel to see list of users logged in # - Add / remove users in some authenticators # - Restart / halt the hub # - Start / stop users' single-user servers # - Can access each individual users' single-user server (if configured) # # Admin access should be treated the same way root access is. # # Defaults to an empty set, in which case no user has admin access. #c.Authenticator.admin_users = set() ## Whitelist of usernames that are allowed to log in. # # Use this with supported authenticators to restrict which users can log in. # This is an additional whitelist that further restricts users, beyond whatever # restrictions the authenticator has in place. # # If empty, does not perform any additional restriction. #c.Authenticator.whitelist = set() #------------------------------------------------------------------------------ # LocalAuthenticator(Authenticator) configuration #------------------------------------------------------------------------------ ## Base class for Authenticators that work with local Linux/UNIX users # # Checks for local users, and can attempt to create them if they exist. ## The command to use for creating users as a list of strings # # For each element in the list, the string USERNAME will be replaced with the # user's username. The username will also be appended as the final argument. # # For Linux, the default value is: # # ['adduser', '-q', '--gecos', '""', '--disabled-password'] # # To specify a custom home directory, set this to: # # ['adduser', '-q', '--gecos', '""', '--home', '/customhome/USERNAME', '-- # disabled-password'] # # This will run the command: # # adduser -q --gecos "" --home /customhome/river --disabled-password river # # when the user 'river' is created. #c.LocalAuthenticator.add_user_cmd = [] ## If set to True, will attempt to create local system users if they do not exist # already. # # Supports Linux and BSD variants only. 
c.LocalAuthenticator.create_system_users = False ## Whitelist all users from this UNIX group. # # This makes the username whitelist ineffective. #c.LocalAuthenticator.group_whitelist = set() #------------------------------------------------------------------------------ # PAMAuthenticator(LocalAuthenticator) configuration #------------------------------------------------------------------------------ ## Authenticate local UNIX users with PAM ## The text encoding to use when communicating with PAM #c.PAMAuthenticator.encoding = 'utf8' ## Whether to open a new PAM session when spawners are started. # # This may trigger things like mounting shared filsystems, loading credentials, # etc. depending on system configuration, but it does not always work. # # If any errors are encountered when opening/closing PAM sessions, this is # automatically set to False. #c.PAMAuthenticator.open_sessions = True ## The name of the PAM service to use for authentication #c.PAMAuthenticator.service = 'login'<|fim▁end|>
# Use with ssl_cert c.JupyterHub.ssl_key = '/etc/jupyterhub/jupyterhub.key.pem' #------------------------------------------------------------------------------
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
__author__ = 'ralmn'
<|file_name|>seatmyself.py<|end_file_name|><|fim▁begin|>############################## # # # Instructions # # # ############################## # To run, use the following command: # $ python seating.py <input_file> # where <input_file> is the filename with the question's input import sys import re # Check to make sure correct number of arguments supplied<|fim▁hole|>if (len(sys.argv) != 2): print('Invalid number of arguments!') sys.exit() # Read the input from the file provided as argument input_file = open(sys.argv[1]) puzzle_input = input_file.readlines() input_file.close() # Regular expression to get the names and happiness changes of each pair regex_happiness = re.compile(r'(\w+) would (gain|lose) (\d+) happiness units by sitting next to (\w+).') happiness = {} possibilities = [] # For every line in input for line in puzzle_input: info = re.match(regex_happiness, line) # Check if the person is gaining or losing happiness mult = 1 if info.group(2) == 'lose': mult = -1 # Add the person and their neighbor as an entry in the dict if info.group(1) in happiness: happiness[info.group(1)][info.group(4)] = mult * int(info.group(3)) else: happiness[info.group(1)] = {info.group(4): mult * int(info.group(3))} # Adding myself to the table happiness['Joseph'] = {} for person in happiness: if not person == 'Joseph': happiness[person]['Joseph'] = 0 happiness['Joseph'][person] = 0 # Finds all the possibilities from a person to neighbors which have not been tried so far # and adds the total change in happiness together def calc_possibilities(first_person, person, visited, total_so_far): global happiness global possibilities global best_so_far # Make a copy of the list and add a new entry visited = visited[:] visited.append(person) # If all of the people are in the list, add the total change in happiness to the possibilities if len(visited) == len(happiness): total_so_far += happiness[first_person][person] + happiness[person][first_person] possibilities.append(total_so_far) # For 
each person the person can sit beside for neighbor in happiness[person]: # If they're already in the list, skip them if neighbor in visited: continue # Get all the possibilities of the next person's neighbor calc_possibilities(first_person, neighbor, visited, total_so_far + happiness[neighbor][person] + happiness[person][neighbor]) # Start with each person and go around the table, trying every combination for person in happiness: for neighbor in happiness[person]: calc_possibilities(person, neighbor, [person], happiness[person][neighbor] + happiness[neighbor][person]) # Print the overall best possibility print('The best seating arrangement has a combined happiness of', max(possibilities))<|fim▁end|>
<|file_name|>deps.py<|end_file_name|><|fim▁begin|>from collections import OrderedDict, defaultdict, deque from typing import List, Dict, Deque from acbs.find import find_package from acbs.parser import ACBSPackageInfo, check_buildability # package information cache pool: Dict[str, ACBSPackageInfo] = {} def tarjan_search(packages: 'OrderedDict[str, ACBSPackageInfo]', search_path: str) -> List[List[ACBSPackageInfo]]: """This function describes a Tarjan's strongly connected components algorithm. The resulting list of ACBSPackageInfo are sorted topologically as a byproduct of the algorithm """ # Initialize state trackers lowlink: Dict[str, int] = defaultdict(lambda: -1) index: Dict[str, int] = defaultdict(lambda: -1) stackstate: Dict[str, bool] = defaultdict(bool) stack: Deque[str] = deque() results: List[List[ACBSPackageInfo]] = [] packages_list: List[str] = [i for i in packages] pool.update(packages) for i in packages_list: if index[i] == -1: # recurse on each package that is not yet visited strongly_connected(search_path, packages_list, results, packages, i, lowlink, index, stackstate, stack) return results <|fim▁hole|> new_installables = [] for d in package.installables: # skip self-dependency if d == package.name: new_installables.append(d) continue try: packages_list.index(d) package.deps.append(d) except ValueError: new_installables.append(d) package.installables = new_installables return package def strongly_connected(search_path: str, packages_list: List[str], results: list, packages: 'OrderedDict[str, ACBSPackageInfo]', vert: str, lowlink: Dict[str, int], index: Dict[str, int], stackstate: Dict[str, bool], stack: Deque[str], depth=0): # update depth indices index[vert] = depth lowlink[vert] = depth depth += 1 stackstate[vert] = True stack.append(vert) # search package begin print(f'[{len(results) + 1}/{len(pool)}] {vert}\t\t\r', end='', flush=True) current_package = packages.get(vert) if current_package is None: package = pool.get(vert) or find_package(vert, 
search_path) if not package: raise ValueError( f'Package {vert} not found') if isinstance(package, list): for s in package: if vert == s.name: current_package = s pool[s.name] = s continue pool[s.name] = s packages_list.append(s.name) else: current_package = package pool[vert] = current_package assert current_package is not None # first check if this dependency is buildable # when `required_by` argument is present, it will raise an exception when the dependency is unbuildable. check_buildability(current_package, stack[-2] if len(stack) > 1 else '<unknown>') # search package end # Look for adjacent packages (dependencies) for p in current_package.deps: if index[p] == -1: # recurse on unvisited packages strongly_connected(search_path, packages_list, results, packages, p, lowlink, index, stackstate, stack, depth) lowlink[vert] = min(lowlink[p], lowlink[vert]) # adjacent package is in the stack which means it is part of a loop elif stackstate[p] is True: lowlink[vert] = min(lowlink[p], index[vert]) w = '' result = [] # if this is a root vertex if lowlink[vert] == index[vert]: # the current stack contains the vertices that belong to the same loop # if the stack only contains one vertex, then there is no loop there while w != vert: w = stack.pop() result.append(pool[w]) stackstate[w] = False results.append(result)<|fim▁end|>
def prepare_for_reorder(package: ACBSPackageInfo, packages_list: List[str]) -> ACBSPackageInfo: """This function prepares the package for reordering. The idea is to move the installable dependencies which are in the build list to the "uninstallable" list. """
<|file_name|>SmartHouseConfigReadError.java<|end_file_name|><|fim▁begin|>package si.majeric.smarthouse.xstream.dao; public class SmartHouseConfigReadError extends RuntimeException { private static final long serialVersionUID = 1L; public SmartHouseConfigReadError(Exception e) { super(e);<|fim▁hole|><|fim▁end|>
} }
<|file_name|>common.py<|end_file_name|><|fim▁begin|>import string import random import json from collections import defaultdict from django.http import HttpResponse from django.shortcuts import render_to_response from django.template.context import RequestContext from catmaid.fields import Double3D from catmaid.models import Log, NeuronSearch, CELL_BODY_CHOICES, \ SORT_ORDERS_DICT, Relation, Class, ClassInstance, \ ClassInstanceClassInstance def _create_relation(user, project_id, relation_id, instance_a_id, instance_b_id): relation = ClassInstanceClassInstance() relation.user = user relation.project_id = project_id relation.relation_id = relation_id relation.class_instance_a_id = instance_a_id relation.class_instance_b_id = instance_b_id relation.save() return relation def insert_into_log(project_id, user_id, op_type, location=None, freetext=None): """ Inserts a new entry into the log table. If the location parameter is passed, it is expected to be an iteratable (list, tuple). """ # valid operation types operation_type_array = [ "rename_root", "create_neuron", "rename_neuron", "remove_neuron", "move_neuron", "create_group", "rename_group", "remove_group", "move_group", "create_skeleton", "rename_skeleton", "remove_skeleton", "move_skeleton", "split_skeleton", "join_skeleton", "reroot_skeleton", "change_confidence" ] if not op_type in operation_type_array: return {'error': 'Operation type {0} not valid'.format(op_type)} new_log = Log() new_log.user_id = user_id new_log.project_id = project_id new_log.operation_type = op_type if not location is None: new_log.location = Double3D(*location) if not freetext is None: new_log.freetext = freetext new_log.save() # $q = $db->insertIntoId('log', $data ); # echo json_encode( array ( 'error' => "Failed to insert operation $op_type for user $uid in project %pid." 
) ); # Tip from: http://lincolnloop.com/blog/2008/may/10/getting-requestcontext-your-templates/ # Required because we need a RequestContext, not just a Context - the # former looks at TEMPLATE_CONTEXT_PROCESSORS, while the latter doesn't. def my_render_to_response(req, *args, **kwargs): kwargs['context_instance'] = RequestContext(req) return render_to_response(*args, **kwargs) def json_error_response(message): """ When an operation fails we should return a JSON dictionary with the key 'error' set to an error message. This is a helper method to return such a structure: """ return HttpResponse(json.dumps({'error': message}), content_type='text/json') def order_neurons(neurons, order_by=None): column, reverse = 'name', False if order_by and (order_by in SORT_ORDERS_DICT): column, reverse, _ = SORT_ORDERS_DICT[order_by] if column == 'name': neurons.sort(key=lambda x: x.name) elif column == 'gal4': neurons.sort(key=lambda x: x.cached_sorted_lines_str) elif column == 'cell_body': neurons.sort(key=lambda x: x.cached_cell_body) else: raise Exception("Unknown column (%s) in order_neurons" % (column,)) if reverse: neurons.reverse() return neurons # Both index and visual_index take a request and kwargs and then # return a list of neurons and a NeuronSearch form: def get_form_and_neurons(request, project_id, kwargs): # If we've been passed parameters in a REST-style GET request, # create a form from them. Otherwise, if it's a POST request, # create the form from the POST parameters. Otherwise, it's a # plain request, so create the default search form. 
rest_keys = ('search', 'cell_body_location', 'order_by') if any((x in kwargs) for x in rest_keys): kw_search = kwargs.get('search', None) or "" kw_cell_body_choice = kwargs.get('cell_body_location', None) or "a" kw_order_by = kwargs.get('order_by', None) or 'name' search_form = NeuronSearch({'search': kw_search, 'cell_body_location': kw_cell_body_choice, 'order_by': kw_order_by}) elif request.method == 'POST': search_form = NeuronSearch(request.POST) else: search_form = NeuronSearch({'search': '', 'cell_body_location': 'a', 'order_by': 'name'}) if search_form.is_valid(): search = search_form.cleaned_data['search'] cell_body_location = search_form.cleaned_data['cell_body_location'] order_by = search_form.cleaned_data['order_by'] else: search = '' cell_body_location = 'a' order_by = 'name' cell_body_choices_dict = dict(CELL_BODY_CHOICES) all_neurons = ClassInstance.objects.filter( project__id=project_id, class_column__class_name='neuron', name__icontains=search).exclude(name='orphaned pre').exclude(name='orphaned post') if cell_body_location != 'a': location = cell_body_choices_dict[cell_body_location] all_neurons = all_neurons.filter( project__id=project_id, cici_via_a__relation__relation_name='has_cell_body', cici_via_a__class_instance_b__name=location) cici_qs = ClassInstanceClassInstance.objects.filter( project__id=project_id, relation__relation_name='has_cell_body', class_instance_a__class_column__class_name='neuron', class_instance_b__class_column__class_name='cell_body_location') neuron_id_to_cell_body_location = dict( (x.class_instance_a.id, x.class_instance_b.name) for x in cici_qs) neuron_id_to_driver_lines = defaultdict(list) for cici in ClassInstanceClassInstance.objects.filter( project__id=project_id, relation__relation_name='expresses_in', class_instance_a__class_column__class_name='driver_line', class_instance_b__class_column__class_name='neuron'): neuron_id_to_driver_lines[cici.class_instance_b.id].append(cici.class_instance_a) all_neurons = 
list(all_neurons) for n in all_neurons: n.cached_sorted_lines = sorted( neuron_id_to_driver_lines[n.id], key=lambda x: x.name) n.cached_sorted_lines_str = ", ".join(x.name for x in n.cached_sorted_lines) n.cached_cell_body = neuron_id_to_cell_body_location.get(n.id, 'Unknown') all_neurons = order_neurons(all_neurons, order_by) return (all_neurons, search_form) # TODO After all PHP functions have been replaced and all occurrence of # this odd behavior have been found, change callers to not depend on this # legacy functionality. def makeJSON_legacy_list(objects): ''' The PHP function makeJSON, when operating on a list of rows as results, will output a JSON list of key-values, with keys being integers from 0 and upwards. We return a dict with the same structure so that it looks the same when used with json.dumps. ''' i = 0 res = {} for o in objects: res[i] = o i += 1 return res def cursor_fetch_dictionary(cursor): "Returns all rows from a cursor as a dict" desc = cursor.description return [ dict(zip([col[0] for col in desc], row)) for row in cursor.fetchall() ] def get_relation_to_id_map(project_id): return {rname: ID for rname, ID in Relation.objects.filter(project=project_id).values_list("relation_name", "id")} def get_class_to_id_map(project_id): return {cname: ID for cname, ID in Class.objects.filter(project=project_id).values_list("class_name", "id")} def urljoin(a, b):<|fim▁hole|> a = a + '/' if b[0] == '/': b = b[1:] return a + b def id_generator(size=6, chars=string.ascii_lowercase + string.digits): """ Creates a random string of the specified length. """ return ''.join(random.choice(chars) for x in range(size))<|fim▁end|>
""" Joins to URL parts a and b while making sure this exactly one slash inbetween. """ if a[-1] != '/':
<|file_name|>svgreader.cpp<|end_file_name|><|fim▁begin|>/** * Phoebe DOM Implementation. * * This is a C++ approximation of the W3C DOM model, which follows * fairly closely the specifications in the various .idl files, copies of * which are provided for reference. Most important is this one: * * http://www.w3.org/TR/2004/REC-DOM-Level-3-Core-20040407/idl-definitions.html * * Authors: * Bob Jamison * * Copyright (C) 2005-2008 Bob Jamison * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA * * ======================================================================= * NOTES * * */ #include "svgreader.h" #include "dom/cssreader.h" #include "dom/ucd.h" #include "xmlreader.h" #include <stdarg.h><|fim▁hole|> namespace org { namespace w3c { namespace dom { namespace svg { //######################################################################### //# M E S S A G E S //######################################################################### /** * */ void SVGReader::error(char const *fmt, ...) { va_list args; fprintf(stderr, "SVGReader:error: "); va_start(args, fmt); vfprintf(stderr, fmt, args); va_end(args) ; fprintf(stderr, "\n"); } /** * */ void SVGReader::trace(char const *fmt, ...) 
{ va_list args; fprintf(stdout, "SVGReader: "); va_start(args, fmt); vfprintf(stdout, fmt, args); va_end(args) ; fprintf(stdout, "\n"); } //######################################################################### //# P A R S I N G //######################################################################### /** * Get the character at the position and record the fact */ XMLCh SVGReader::get(int p) { if (p >= parselen) return 0; XMLCh ch = parsebuf[p]; //printf("%c", ch); lastPosition = p; return ch; } /** * Test if the given substring exists at the given position * in parsebuf. Use get() in case of out-of-bounds */ bool SVGReader::match(int pos, char const *str) { while (*str) { if (get(pos++) != (XMLCh) *str++) return false; } return true; } /** * */ int SVGReader::skipwhite(int p) { while (p < parselen) { //# XML COMMENT if (match(p, "<!--")) { p+=4; bool done=false; while (p<parselen) { if (match(p, "-->")) { p+=3; done=true; break; } p++; } lastPosition = p; if (!done) { error("unterminated <!-- .. --> comment"); return -1; } } //# C comment else if (match(p, "/*")) { p+=2; bool done=false; while (p<parselen) { if (match(p, "*/")) { p+=2; done=true; break; } p++; } lastPosition = p; if (!done) { error("unterminated /* .. */ comment"); return -1; } } else if (!uni_is_space(get(p))) break; else p++; } lastPosition = p; return p; } /** * get a word from the buffer */ int SVGReader::getWord(int p, DOMString &result) { XMLCh ch = get(p); if (!uni_is_letter(ch)) return p; DOMString str; str.push_back(ch); p++; while (p < parselen) { ch = get(p); if (uni_is_letter_or_digit(ch) || ch=='-' || ch=='_') { str.push_back(ch); p++; } else if (ch == '\\') { p+=2; } else break; } result = str; return p; } # if 0 /** * get a word from the buffer */ int SVGReader::getNumber(int p0, double &result) { int p=p0; DOMString str; //allow sign if (get(p) == '-') { p++; } while (p < parselen) { XMLCh ch = get(p); if (ch<'0' || ch>'9') break; str.push_back(ch); p++; } if (get(p) == '.' 
&& get(p+1)>='0' && get(p+1)<='9') { p++; str.push_back('.'); while (p < parselen) { XMLCh ch = get(p); if (ch<'0' || ch>'9') break; str.push_back(ch); p++; } } if (p>p0) { char *start = (char *)str.c_str(); char *end = NULL; double val = strtod(start, &end); if (end > start) { result = val; return p; } } //not a number return p0; } #endif /** * get a word from the buffer */ int SVGReader::getNumber(int p0, double &result) { int p=p0; char buf[64]; int i; for (i=0 ; i<63 && p<parselen ; i++) { buf[i] = (char) get(p++); } buf[i] = '\0'; char *start = buf; char *end = NULL; double val = strtod(start, &end); if (end > start) { result = val; int count = (int)(end - start); p = p0 + count; return p; } //not a number return p0; } bool SVGReader::parseTransform(const DOMString &str) { parsebuf = str; parselen = str.size(); //printf("transform:%s\n", str.c_str()); SVGTransformList transformList; int p = 0; while (p < parselen) { p = skipwhite(p); DOMString name; int p2 = getWord(p, name); if (p2<0) return false; if (p2<=p) { error("transform: need transform name"); //return false; break; } p = p2; //printf("transform name:%s\n", name.c_str()); //######### MATRIX if (name == "matrix") { p = skipwhite(p); if (get(p++) != '(') { error("matrix transform needs opening '('"); return false; } int nrVals = 0; double vals[6]; bool seenBrace = false; while (p < parselen && nrVals < 6) { p = skipwhite(p); double val = 0.0; p2 = getNumber(p, val); if (p2<0) return false; if (p2<=p) { error("matrix() expected number"); return false; } vals[nrVals++] = val; p = skipwhite(p2); XMLCh ch = get(p); if (ch == ',') { p++; p = skipwhite(p); ch = get(p); } if (ch == ')') { seenBrace = true; p++; break; } } if (!seenBrace) { error("matrix() needs closing brace"); return false; } if (nrVals != 6) { error("matrix() requires exactly 6 arguments"); return false; } //We got our arguments //printf("translate: %f %f %f %f %f %f\n", // vals[0], vals[1], vals[2], vals[3], vals[4], vals[5]); SVGMatrix 
matrix(vals[0], vals[1], vals[2], vals[3], vals[4], vals[5]); SVGTransform transform; transform.setMatrix(matrix); transformList.appendItem(transform); } //######### TRANSLATE else if (name == "translate") { p = skipwhite(p); if (get(p++) != '(') { error("matrix transform needs opening '('"); return false; } p = skipwhite(p); double x = 0.0; p2 = getNumber(p, x); if (p2<0) return false; if (p2<=p) { error("translate() expected 'x' value"); return false; } p = skipwhite(p2); if (get(p) == ',') { p++; p = skipwhite(p); } double y = 0.0; p2 = getNumber(p, y); if (p2<0) return false; if (p2<=p) //no y specified. use default y = 0.0; p = skipwhite(p2); if (get(p++) != ')') { error("translate() needs closing ')'"); return false; } //printf("translate: %f %f\n", x, y); SVGTransform transform; transform.setTranslate(x, y); transformList.appendItem(transform); } //######### SCALE else if (name == "scale") { p = skipwhite(p); if (get(p++) != '(') { error("scale transform needs opening '('"); return false; } p = skipwhite(p); double x = 0.0; p2 = getNumber(p, x); if (p2<0) return false; if (p2<=p) { error("scale() expected 'x' value"); return false; } p = skipwhite(p2); if (get(p) == ',') { p++; p = skipwhite(p); } double y = 0.0; p2 = getNumber(p, y); if (p2<0) return false; if (p2<=p) //no y specified. use default y = x; // y is same as x. 
uniform scaling p = skipwhite(p2); if (get(p++) != ')') { error("scale() needs closing ')'"); return false; } //printf("scale: %f %f\n", x, y); SVGTransform transform; transform.setScale(x, y); transformList.appendItem(transform); } //######### ROTATE else if (name == "rotate") { p = skipwhite(p); if (get(p++) != '(') { error("rotate transform needs opening '('"); return false; } p = skipwhite(p); double angle = 0.0; p2 = getNumber(p, angle); if (p2<0) return false; if (p2<=p) { error("rotate() expected 'angle' value"); return false; } p = skipwhite(p2); if (get(p) == ',') { p++; p = skipwhite(p); } double cx = 0.0; double cy = 0.0; p2 = getNumber(p, cx); if (p2>p) { p = skipwhite(p2); if (get(p) == ',') { p++; p = skipwhite(p); } p2 = getNumber(p, cy); if (p2<0) return false; if (p2<=p) { error("rotate() arguments should be either rotate(angle) or rotate(angle, cx, cy)"); return false; } p = skipwhite(p2); } if (get(p++) != ')') { error("rotate() needs closing ')'"); return false; } //printf("rotate: %f %f %f\n", angle, cx, cy); SVGTransform transform; transform.setRotate(angle, cx, cy); transformList.appendItem(transform); } //######### SKEWX else if (name == "skewX") { p = skipwhite(p); if (get(p++) != '(') { error("skewX transform needs opening '('"); return false; } p = skipwhite(p); double x = 0.0; p2 = getNumber(p, x); if (p2<0) return false; if (p2<=p) { error("skewX() expected 'x' value"); return false; } p = skipwhite(p2); if (get(p++) != ')') { error("skewX() needs closing ')'"); return false; } //printf("skewX: %f\n", x); SVGTransform transform; transform.setSkewX(x); transformList.appendItem(transform); } //######### SKEWY else if (name == "skewY") { p = skipwhite(p); if (get(p++) != '(') { error("skewY transform needs opening '('"); return false; } p = skipwhite(p); double y = 0.0; p2 = getNumber(p, y); if (p2<0) return false; if (p2<=p) { error("skewY() expected 'y' value"); return false; } p = skipwhite(p2); if (get(p++) != ')') { error("skewY() 
needs closing ')'"); return false; } //printf("skewY: %f\n", y); SVGTransform transform; transform.setSkewY(y); transformList.appendItem(transform); } //### NONE OF THE ABOVE else { error("unknown transform type:'%s'", name.c_str()); } p = skipwhite(p); XMLCh ch = get(p); if (ch == ',') { p++; p = skipwhite(p); } }//WHILE p<parselen return true; } /** * */ bool SVGReader::parseElement(SVGElementImplPtr parent, ElementImplPtr sourceElem) { if (!parent) { error("NULL dest element"); return false; } if (!sourceElem) { error("NULL source element"); return false; } DOMString namespaceURI = sourceElem->getNamespaceURI(); //printf("namespaceURI:%s\n", namespaceURI.c_str()); DOMString tagName = sourceElem->getTagName(); printf("tag name:%s\n", tagName.c_str()); ElementPtr newElement = doc->createElementNS(namespaceURI, tagName); if (!newElement) { return false; } NamedNodeMap &attrs = sourceElem->getAttributes(); for (unsigned int i=0 ; i<attrs.getLength() ; i++) { NodePtr n = attrs.item(i); newElement->setAttribute(n->getNodeName(), n->getNodeValue());//should be exception here } parent->appendChild(newElement); NodeList children = sourceElem->getChildNodes(); int nodeCount = children.getLength(); for (int i=0 ; i<nodeCount ; i++) { NodePtr child = children.item(i); int typ = child->getNodeType(); if (typ == Node::TEXT_NODE) { NodePtr newNode = doc->createTextNode(child->getNodeValue()); parent->appendChild(newNode); } else if (typ == Node::CDATA_SECTION_NODE) { NodePtr newNode = doc->createCDATASection(child->getNodeValue()); parent->appendChild(newNode); } else if (newElement.get() && typ == Node::ELEMENT_NODE) { //ElementImplPtr childElement = dynamic_cast<ElementImpl *>(child.get()); //parseElement(newElement, childElement); } } return true; } /** * */ SVGDocumentPtr SVGReader::parse(const DocumentPtr src) { if (!src) { error("NULL source document"); return NULL; } DOMImplementationImpl impl; doc = new SVGDocumentImpl(&impl, SVG_NAMESPACE, "svg" , NULL); 
SVGElementImplPtr destElem = dynamic_pointer_cast<SVGElementImpl, SVGElement>(doc->getRootElement()); ElementImplPtr srcElem = dynamic_pointer_cast<ElementImpl, Element>(src->getDocumentElement()); if (!parseElement(destElem, srcElem)) { return NULL; } return doc; } /** * */ SVGDocumentPtr SVGReader::parse(const DOMString &buf) { /* remember, smartptrs are null-testable*/ SVGDocumentPtr svgdoc; XmlReader parser; DocumentPtr doc = parser.parse(buf); if (!doc) { return svgdoc; } svgdoc = parse(doc); return svgdoc; } /** * */ SVGDocumentPtr SVGReader::parseFile(const DOMString &fileName) { /* remember, smartptrs are null-testable*/ SVGDocumentPtr svgdoc; XmlReader parser; DocumentPtr doc = parser.parseFile(fileName); if (!doc) { error("Could not load xml doc"); return svgdoc; } svgdoc = parse(doc); return svgdoc; } } //namespace svg } //namespace dom } //namespace w3c } //namespace org /*######################################################################### ## E N D O F F I L E #########################################################################*/<|fim▁end|>
<|file_name|>wiki.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 """ Wikipedia lookup plugin for Botty. Example invocations: <|fim▁hole|> #general | Me: wtf is water #general | Botty: wikipedia says, "Water (chemical formula: H2O) is a transparent fluid which forms the world's streams, lakes, oceans and rain, and is the major constituent of the fluids of organisms. As a chemical compound, a water molecule contains one oxygen and two hydrogen atoms that are connected by covalent bonds." """ import re import wikipedia from .utilities import BasePlugin class WikiPlugin(BasePlugin): def __init__(self, bot): super().__init__(bot) def on_message(self, message): text = self.get_message_text(message) if text is None: return False match = re.search(r"^\s*\b(?:what\s+is|what's|wtf\s+(?:is|are))\s+([^,\?]+|\"[^\"]+\")", text, re.IGNORECASE) if not match: return False query = self.sendable_text_to_text(match.group(1)) # get query as plain text in order to make things like < and > work (these are usually escaped) if query in {"this", "that", "going on", "up"}: return False # ignore these common false positive expressions # perform Wikipedia lookup try: self.respond_raw("wikipedia says, \"{}\"".format(wikipedia.summary(query, sentences=2))) except wikipedia.exceptions.DisambiguationError as e: # disambiguation page, list possibilities self.respond_raw("could be one of the following: {}".format("; ".join(e.args[1]))) except: self.respond_raw("dunno") return True<|fim▁end|>
#general | Me: what is fire #general | Botty: wikipedia says, "Fire is the rapid oxidation of a material in the exothermic chemical process of combustion, releasing heat, light, and various reaction products. Slower oxidative processes like rusting or digestion are not included by this definition." #general | Me: what's bismuth? #general | Botty: wikipedia says, "Bismuth is a chemical element with symbol Bi and atomic number 83. Bismuth, a pentavalent post-transition metal, chemically resembles arsenic and antimony. Elemental bismuth may occur naturally, although its sulfide and oxide form important commercial ores."
<|file_name|>code_location.go<|end_file_name|><|fim▁begin|>package codelocation<|fim▁hole|> "runtime" "runtime/debug" "strings" "github.com/mysza/go-service-template/Godeps/_workspace/src/github.com/onsi/ginkgo/types" ) func New(skip int) types.CodeLocation { _, file, line, _ := runtime.Caller(skip + 1) stackTrace := PruneStack(string(debug.Stack()), skip) return types.CodeLocation{FileName: file, LineNumber: line, FullStackTrace: stackTrace} } func PruneStack(fullStackTrace string, skip int) string { stack := strings.Split(fullStackTrace, "\n") if len(stack) > 2*(skip+1) { stack = stack[2*(skip+1):] } prunedStack := []string{} re := regexp.MustCompile(`\/ginkgo\/|\/pkg\/testing\/|\/pkg\/runtime\/`) for i := 0; i < len(stack)/2; i++ { if !re.Match([]byte(stack[i*2])) { prunedStack = append(prunedStack, stack[i*2]) prunedStack = append(prunedStack, stack[i*2+1]) } } return strings.Join(prunedStack, "\n") }<|fim▁end|>
import ( "regexp"
<|file_name|>markdown.rs<|end_file_name|><|fim▁begin|>// This code is heavily inspired by hoedown. // // Performance observations: //<|fim▁hole|> static SP: u8 = b' '; static NL: u8 = b'\n'; static CR: u8 = b'\r'; static TAB: u8 = b'\t'; static STAR: u8 = b'*'; static DASH: u8 = b'-'; static UNDERSCORE: u8 = b'_'; static TILDE: u8 = b'~'; static BACKTICK: u8 = b'`'; // // Skip up to three leading spaces. // // Up to three leading spaces are allowed for many elements. // // We don't need to care about a TAB here as in this position it is equivalent // to 4 spaces, which means that when we find a TAB here we would not parse the // corresponding element. // fn skip_initial_three_spaces<'a>(buf: &'a[u8])-> &'a[u8] { let mut buf = buf; if buf.head() == Some(&SP) { buf = buf.tail(); if buf.head() == Some(&SP) { buf = buf.tail(); if buf.head() == Some(&SP) { buf = buf.tail(); } } } return buf; } // // Return Some(`rem`) if the line is a horizontal rule, with `rem` being the // buf after the hrule. Otherwise return None. 
// fn is_hrule<'a>(buf: &'a[u8]) -> Option<&'a[u8]> { let buf = skip_initial_three_spaces(buf); let item = match buf.head() { Some(&c) if c == STAR || c == DASH || c == UNDERSCORE => c, _ => return None }; // The count of '*', '-' or '_' let mut cnt: uint = 0; // Counts the consumed spaces (and the final NL) let mut spc: uint = 0; for &ch in buf.iter() { if ch == item { cnt += 1; } else if ch == NL { spc += 1; break; } else if ch == SP { spc += 1; } else { return None; } } if cnt >= 3 { Some(buf.slice_from(cnt + spc)) } else { None } } #[test] fn test_is_hrule() { // examples as given on the markdown homepage assert!(is_hrule(b"* * *\n").is_some()); assert!(is_hrule(b"***\n").is_some()); assert!(is_hrule(b"*****\n").is_some()); assert!(is_hrule(b"- - -\n").is_some()); assert!(is_hrule(b"---------------------------------------\n").is_some()); // up to three spaces ignored assert!(is_hrule(b" ***\n").is_some()); assert!(is_hrule(b" ***\n").is_some()); assert!(is_hrule(b" ***\n").is_some()); // but not four, or a tab which is equivalent to four spaces assert!(is_hrule(b" ***\n").is_none()); assert!(is_hrule(b"\t***\n").is_none()); // need at least three assert!(is_hrule(b"*\n").is_none()); assert!(is_hrule(b"**\n").is_none()); assert!(is_hrule(b"* *\n").is_none()); assert!(is_hrule(b" * *\n").is_none()); // Also works without newline at the end assert!(is_hrule(b"* * *").is_some()); // And underscores also supported assert!(is_hrule(b"___").is_some()); assert!(is_hrule(b"______________").is_some()); assert!(is_hrule(b" ______________").is_some()); // Test if the remaining buf actually works. let s = b" * * *\nremaining"; let res = is_hrule(s); assert!(res.is_some()); assert_eq!(res.unwrap(), b"remaining"); } // // Return Some(`rem`) if the line is an empty line, with `rem` being the buf // after the empty line. Otherwise return None. 
// fn is_empty<'a>(buf: &'a[u8]) -> Option<&'a[u8]> { let mut cnt: uint = 0; for &ch in buf.iter() { if ch == NL { cnt += 1; break; } else if ch == CR { cnt += 1; } else if ch == SP { cnt += 1; } else if ch == TAB { cnt += 1; } else { return None; } } if cnt > 0 { Some(buf.slice_from(cnt)) } else { None } } #[test] fn test_is_empty() { assert!(is_empty(b"\n").is_some()); assert!(is_empty(b" \n").is_some()); assert!(is_empty(b" \t \n").is_some()); assert!(is_empty(b" \t \r\n").is_some()); assert!(is_empty(b" \t \nabc").is_some()); assert!(is_empty(b" \t ").is_some()); assert!(is_empty(b"a").is_none()); assert!(is_empty(b" a").is_none()); assert!(is_empty(b" a\n").is_none()); assert!(is_empty(b" \ta\n").is_none()); // Test if the remaining buf actually works. let s = b" \t\r\nremaining"; let res = is_empty(s); assert!(res.is_some()); assert_eq!(res.unwrap(), b"remaining"); } fn is_codefence<'a>(buf: &'a[u8]) -> Option<(&'a[u8], uint, u8)> { let buf = skip_initial_three_spaces(buf); let item = match buf.head() { Some(&c) if c == TILDE || c == BACKTICK => c, _ => return None }; // The count of '~' or '`' characters let mut cnt: uint = 0; for &ch in buf.iter() { if ch == item { cnt += 1; } else { break; } } if cnt >= 3 { Some((buf.slice_from(cnt), cnt, item)) } else { None } } #[test] fn test_is_codefence() { assert!(is_codefence(b"```").is_some()); assert!(is_codefence(b"~~~").is_some()); assert!(is_codefence(b"`````````").is_some()); assert!(is_codefence(b"~~~~").is_some()); assert!(is_codefence(b" ```").is_some()); assert!(is_codefence(b" ~~~").is_some()); assert!(is_codefence(b" ~~").is_none()); assert!(is_codefence(b" ``").is_none()); assert!(is_codefence(b" ```").is_none()); assert!(is_codefence(b"\t```").is_none()); // Test if the remaining buf actually works. 
let s = b" ```remaining\n"; let res = is_codefence(s); assert!(res.is_some()); assert_eq!(res.unwrap(), (b"remaining\n", 3, BACKTICK)); let s = b" ~~~~~~~~remaining\n"; let res = is_codefence(s); assert!(res.is_some()); assert_eq!(res.unwrap(), (b"remaining\n", 8, TILDE)); }<|fim▁end|>
// Note that buf.head() == Some(&ch) is much faster than buf[0] == ch. //
<|file_name|>DiscoveryPacket.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export default interface DiscoveryPacket { timestamp: number; apiVersion: number; port: number; }<|fim▁end|>
<|file_name|>bitcoin_ca_ES.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ca_ES" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About ShardScrypt</source> <translation>Sobre ShardScrypt</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;ShardScrypt&lt;/b&gt; version</source> <translation>&lt;b&gt;ShardScrypt&lt;/b&gt; versió</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation>\n Aquest és software experimental.\n\n Distribuït sota llicència de software MIT/11, veure l&apos;arxiu COPYING o http://www.opensource.org/licenses/mit-license.php.\n\nAquest producte inclou software desarrollat pel projecte OpenSSL per a l&apos;ús de OppenSSL Toolkit (http://www.openssl.org/) i de softwqre criptogràfic escrit per l&apos;Eric Young ([email protected]) i software UPnP escrit per en Thomas Bernard.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation>Copyright</translation> </message> <message> <location line="+0"/> <source>The ShardScrypt developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Llibreta d&apos;adreces</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> 
<translation>Feu doble clic per editar l&apos;adreça o l&apos;etiqueta</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Crear una nova adreça</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Copiar l&apos;adreça seleccionada al porta-retalls del sistema</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Nova adreça</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your ShardScrypt addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Aquestes són les teves adreces ShardScrypt per a rebre pagaments. Pot interesar-te proveïr diferents adreces a cadascun dels enviadors així pots identificar qui et va pagant.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Copiar adreça</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Mostrar codi &amp;QR</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a ShardScrypt address</source> <translation>Signa el missatge per provar que ets propietari de l&apos;adreça ShardScrypt</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Signar &amp;Missatge</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Esborrar l&apos;adreça sel·leccionada</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> 
<source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified ShardScrypt address</source> <translation>Verificar un missatge per asegurar-se que ha estat signat amb una adreça ShardScrypt específica</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar el missatge</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Esborrar</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your ShardScrypt addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Aquestes són la seva adreça de ShardScrypt per enviar els pagaments. Sempre revisi la quantitat i l&apos;adreça del destinatari abans transferència de monedes.</translation> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Copiar &amp;Etiqueta</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Editar</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation>Enviar &amp;Monedes</translation> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Exporta llibreta d&apos;adreces</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arxiu de separació per comes (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Error en l&apos;exportació</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No s&apos;ha pogut escriure a l&apos;arxiu %1.</translation> </message> </context> <context> <name>AddressTableModel</name> 
<message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adreça</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(sense etiqueta)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Dialeg de contrasenya</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Introdueix contrasenya</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Nova contrasenya</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Repeteix la nova contrasenya</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Introdueixi la nova contrasenya al moneder&lt;br/&gt;Si us plau useu una contrasenya de &lt;b&gt;10 o més caracters aleatoris&lt;/b&gt;, o &lt;b&gt;vuit o més paraules&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Xifrar la cartera</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Aquesta operació requereix la seva contrasenya del moneder per a desbloquejar-lo.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Desbloqueja el moneder</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet 
passphrase to decrypt the wallet.</source> <translation>Aquesta operació requereix la seva contrasenya del moneder per a desencriptar-lo.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Desencripta el moneder</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Canviar la contrasenya</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Introdueixi tant l&apos;antiga com la nova contrasenya de moneder.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Confirmar l&apos;encriptació del moneder</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR LITECOINS&lt;/b&gt;!</source> <translation>Advertència: Si encripteu el vostre moneder i perdeu la constrasenya, &lt;b&gt;PERDREU TOTS ELS VOSTRES LITECOINS&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Esteu segur que voleu encriptar el vostre moneder?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. 
For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>IMPORTANT: Tota copia de seguretat que hagis realitzat hauria de ser reemplaçada pel, recentment generat, arxiu encriptat del moneder.</translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Advertència: Les lletres majúscules estàn activades!</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Moneder encriptat</translation> </message> <message> <location line="-56"/> <source>ShardScrypt will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your litecoins from being stolen by malware infecting your computer.</source> <translation>ShardScrypt es tancarà ara per acabar el procés d&apos;encriptació. Recorda que encriptar el teu moneder no protegeix completament els teus litecoins de ser robades per programari maliciós instal·lat al teu ordinador.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>L&apos;encriptació del moneder ha fallat</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>L&apos;encriptació del moneder ha fallat per un error intern. 
El seu moneder no ha estat encriptat.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>La contrasenya introduïda no coincideix.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>El desbloqueig del moneder ha fallat</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>La contrasenya introduïda per a desencriptar el moneder és incorrecte.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>La desencriptació del moneder ha fallat</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>La contrasenya del moneder ha estat modificada correctament.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>Signar &amp;missatge...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Sincronitzant amb la xarxa ...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Panorama general</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Mostra panorama general del moneder</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Transaccions</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Cerca a l&apos;historial de transaccions</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored 
addresses and labels</source> <translation>Edita la llista d&apos;adreces emmagatzemada i etiquetes</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Mostra el llistat d&apos;adreces per rebre pagaments</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>S&amp;ortir</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Sortir de l&apos;aplicació</translation> </message> <message> <location line="+4"/> <source>Show information about ShardScrypt</source> <translation>Mostra informació sobre ShardScrypt</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Sobre &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Mostra informació sobre Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Opcions...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Xifrar moneder</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Realitzant copia de seguretat del moneder...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Canviar contrasenya...</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation>Important blocs del disc..</translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation>Re-indexant blocs al disc...</translation> </message> <message> <location line="-347"/> <source>Send coins to a ShardScrypt address</source> <translation>Enviar monedes a una adreça ShardScrypt</translation> </message> <message> <location 
line="+49"/> <source>Modify configuration options for ShardScrypt</source> <translation>Modificar les opcions de configuració per shardscrypt</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Realitzar còpia de seguretat del moneder a un altre directori</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Canviar la constrasenya d&apos;encriptació del moneder</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Finestra de debug</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Obrir la consola de diagnòstic i debugging</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>&amp;Verifica el missatge..</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>ShardScrypt</source> <translation>ShardScrypt</translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Moneder</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation>&amp;Enviar</translation> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation>&amp;Rebre</translation> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation>&amp;Adreces</translation> </message> <message> <location line="+22"/> <source>&amp;About ShardScrypt</source> <translation>&amp;Sobre ShardScrypt</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Mostrar / Amagar</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Mostrar o amagar la finestra principal</translation> </message> <message> <location 
line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation>Xifrar les claus privades pertanyents al seu moneder</translation> </message> <message> <location line="+7"/> <source>Sign messages with your ShardScrypt addresses to prove you own them</source> <translation>Signa el missatges amb la seva adreça de ShardScrypt per provar que les poseeixes</translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified ShardScrypt addresses</source> <translation>Verificar els missatges per assegurar-te que han estat signades amb una adreça ShardScrypt específica.</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Arxiu</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Configuració</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Ajuda</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Barra d&apos;eines de seccions</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>ShardScrypt client</source> <translation>Client ShardScrypt</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to ShardScrypt network</source> <translation><numerusform>%n connexió activa a la xarxa ShardScrypt</numerusform><numerusform>%n connexions actives a la xarxa ShardScrypt</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation>Processat el %1 de %2 (estimat) dels blocs del històric de 
transaccions.</translation> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Proccessats %1 blocs del històric de transaccions.</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation><numerusform>%n hora</numerusform><numerusform>%n hores</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n dia</numerusform><numerusform>%n dies</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation><numerusform>%n setmana</numerusform><numerusform>%n setmanes</numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation>%1 radera</translation> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation>Lúltim bloc rebut ha estat generat fa %1.</translation> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation>Les transaccions a partir d&apos;això no seràn visibles.</translation> </message> <message> <location line="+22"/> <source>Error</source> <translation>Error</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Avís</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Informació</translation> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Aquesta transacció supera el límit de tamany. 
Tot i així pots enviar-la amb una comissió de %1, que es destinen als nodes que processen la seva transacció i ajuda a donar suport a la xarxa. Vols pagar la comissió?</translation> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Al dia</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Posar-se al dia ...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Confirmar comissió de transacció</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Transacció enviada</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Transacció entrant</translation> </message> <message> <location line="+1"/> <source>Date: %1
Amount: %2
Type: %3
Address: %4
</source> <translation>Data: %1
Quantitat: %2
Tipus: %3
Adreça: %4
</translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>Manejant URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid ShardScrypt address or malformed URI parameters.</source> <translation>La URI no pot ser processada!
Això es pot ser causat per una adreça ShardScrypt invalida o paràmetres URI malformats.</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>El moneder està &lt;b&gt;encriptat&lt;/b&gt; i actualment &lt;b&gt;desbloquejat&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>El moneder està &lt;b&gt;encriptat&lt;/b&gt; i actualment &lt;b&gt;bloquejat&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. ShardScrypt can no longer continue safely and will quit.</source> <translation>Ha tingut lloc un error fatal. ShardScrypt no pot continuar executant-se de manera segura i es tancará.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>Alerta de xarxa</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Editar Adreça</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Etiqueta</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Etiqueta associada amb aquesta entrada de la llibreta d&apos;adreces</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Direcció</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Adreça associada amb aquesta entrada de la llibreta d&apos;adreces. 
Només pot ser modificat per a enviar adreces.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Nova adreça de recepció.</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nova adreça d&apos;enviament</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Editar adreces de recepció</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Editar adreces d&apos;enviament</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>L&apos;adreça introduïda &quot;%1&quot; ja és present a la llibreta d&apos;adreces.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid ShardScrypt address.</source> <translation>L&apos;adreça introduida &quot;%1&quot; no és una adreça ShardScrypt valida.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>No s&apos;ha pogut desbloquejar el moneder.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Ha fallat la generació d&apos;una nova clau.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>ShardScrypt-Qt</source> <translation>ShardScrypt-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>versió</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Ús:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>Opcions de la línia 
d&apos;ordres</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>Opcions de IU</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Definir llenguatge, per exemple &quot;de_DE&quot; (per defecte: Preferències locals de sistema)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Iniciar minimitzat</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Mostrar finestra de benvinguda a l&apos;inici (per defecte: 1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Opcions</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Principal</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. 
Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Pagar &amp;comisió de transacció</translation> </message> <message> <location line="+31"/> <source>Automatically start ShardScrypt after logging in to the system.</source> <translation>Iniciar automàticament ShardScrypt després de l&apos;inici de sessió del sistema.</translation> </message> <message> <location line="+3"/> <source>&amp;Start ShardScrypt on system login</source> <translation>&amp;Iniciar ShardScrypt al inici de sessió del sistema.</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation>Reestablir totes les opcions del client.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>&amp;Reestablir Opcions</translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>&amp;Xarxa</translation> </message> <message> <location line="+6"/> <source>Automatically open the ShardScrypt client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Obrir el port del client de ShardScrypt al router de forma automàtica. Això només funciona quan el teu router implementa UPnP i l&apos;opció està activada.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Port obert amb &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the ShardScrypt network through a SOCKS proxy (e.g. 
when connecting through Tor).</source> <translation>Connectar a la xarxa ShardScrypt a través de un SOCKS proxy (per exemple connectant a través de Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Connecta a través de un proxy SOCKS:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP del proxy:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>Adreça IP del proxy (per exemple 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Port:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Port del proxy (per exemple 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>&amp;Versió de SOCKS:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>Versió SOCKS del proxy (per exemple 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Finestra</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Mostrar només l&apos;icona de la barra al minimitzar l&apos;aplicació.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimitzar a la barra d&apos;aplicacions</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimitza en comptes de sortir de la aplicació al tancar la finestra. 
Quan aquesta opció està activa, la aplicació només es tancarà al seleccionar Sortir al menú.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimitzar al tancar</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Pantalla</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Llenguatge de la Interfície d&apos;Usuari:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting ShardScrypt.</source> <translation>Aquí pots definir el llenguatge de l&apos;aplicatiu. Aquesta configuració tindrà efecte un cop es reiniciï ShardScrypt.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Unitats per mostrar les quantitats en:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Sel·lecciona la unitat de subdivisió per defecte per mostrar en la interficie quan s&apos;envien monedes.</translation> </message> <message> <location line="+9"/> <source>Whether to show ShardScrypt addresses in the transaction list or not.</source> <translation>Mostrar adreces ShardScrypt als llistats de transaccions o no.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Mostrar adreces al llistat de transaccions</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Cancel·la</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Aplicar</translation> 
</message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>Per defecte</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation>Confirmi el reestabliment de les opcions</translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation>Algunes configuracions poden requerir reiniciar el client per a que tinguin efecte.</translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation>Vols procedir?</translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Avís</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting ShardScrypt.</source> <translation>Aquesta configuració tindrà efecte un cop es reiniciï ShardScrypt.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>L&apos;adreça proxy introduïda és invalida.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Formulari</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the ShardScrypt network after a connection is established, but this process has not completed yet.</source> <translation>La informació mostrada pot no estar al día. 
El teu moneder es sincronitza automàticament amb la xarxa ShardScrypt un cop s&apos;ha establert connexió, però aquest proces no s&apos;ha completat encara.</translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Balanç:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Sense confirmar:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Moneder</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation>Immatur:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Balanç minat que encara no ha madurat</translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Transaccions recents&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>El seu balanç actual</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Total de transaccions encara sense confirmar, que encara no es content en el balanç actual</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>Fora de sincronia</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start shardscrypt: click-to-pay handler</source> <translation>No es pot iniciar shardscrypt: manejador clicla-per-pagar</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Dialeg del codi QR</translation> 
</message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Reclamar pagament</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Quantitat:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Etiqueta:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Missatge:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Desar com...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Error codificant la URI en un codi QR.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>La quantitat introduïda és invalida, si us plau comprovi-la.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>URI resultant massa llarga, intenta reduir el text per a la etiqueta / missatge</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Desar codi QR</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>Imatges PNG (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Nom del client</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>N/A</translation> </message> <message> <location line="-217"/> 
<source>Client version</source> <translation>Versió del client</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Informació</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Utilitzant OpenSSL versió</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>&amp;Temps d&apos;inici</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Xarxa</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Nombre de connexions</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>A testnet</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Bloquejar cadena</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Nombre de blocs actuals</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Total estimat de blocs</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Últim temps de bloc</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Obrir</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Opcions de línia d&apos;ordres</translation> </message> <message> <location line="+7"/> <source>Show the ShardScrypt-Qt help message to get a list with possible ShardScrypt command-line options.</source> <translation>Mostrar el missatge d&apos;ajuda de ShardScrypt-Qt per a obtenir un llistat de possibles ordres per a la línia d&apos;ordres de ShardScrypt.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Mostrar</translation> 
</message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Consola</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Data de compilació</translation> </message> <message> <location line="-104"/> <source>ShardScrypt - Debug window</source> <translation>ShardScrypt -Finestra de debug</translation> </message> <message> <location line="+25"/> <source>ShardScrypt Core</source> <translation>Nucli de ShardScrypt</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Dietàri de debug</translation> </message> <message> <location line="+7"/> <source>Open the ShardScrypt debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Obrir el dietari de debug de ShardScrypt del directori de dades actual. Aixó pot trigar uns quants segons per a dietàris grossos.</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Netejar consola</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the ShardScrypt RPC console.</source> <translation>Benvingut a la consola RPC de ShardScrypt</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Utilitza les fletxes d&apos;amunt i avall per navegar per l&apos;històric, i &lt;b&gt;Ctrl-L&lt;\b&gt; per netejar la pantalla.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Escriu &lt;b&gt;help&lt;\b&gt; per a obtenir una llistat de les ordres disponibles.</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location 
filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Enviar monedes</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Enviar a múltiples destinataris al mateix temps</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Afegir &amp;Destinatari</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Netejar tots els camps de la transacció</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Esborrar &amp;Tot</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Balanç:</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123.456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Confirmi l&apos;acció d&apos;enviament</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>E&amp;nviar</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; a %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Confirmar l&apos;enviament de monedes</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Estàs segur que vols enviar %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> i </translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please
recheck.</source> <translation>L&apos;adreça del destinatari no és vàlida, si us plau comprovi-la.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>La quantitat a pagar ha de ser major que 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>L&apos;import supera el saldo del seu compte.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>El total excedeix el teu balanç quan s&apos;afegeix la comissió a la transacció %1.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>S&apos;ha trobat una adreça duplicada, tan sols es pot enviar a cada adreça un cop per ordre d&apos;enviament.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation>Error: La creació de la transacció ha fallat!</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: La transacció ha estat rebutjada.
Això pot passar si alguna de les monedes del teu moneder ja s&apos;han gastat, com si haguessis usat una còpia de l&apos;arxiu wallet.dat i s&apos;haguessin gastat monedes de la còpia però sense marcar com gastades en aquest.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Formulari</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Q&amp;uantitat:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Pagar &amp;A:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>L&apos;adreça a on envia el pagament (per exemple: Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Introdueixi una etiqueta per a aquesta adreça per afegir-la a la llibreta d&apos;adreces</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Etiqueta:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Escollir adreça del llibre d&apos;adreces</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Enganxar adreça del porta-retalls</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Eliminar aquest destinatari</translation> </message> <message> 
<location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a ShardScrypt address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Introdueixi una adreça de ShardScrypt (per exemple Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Signatures .Signar/Verificar un Missatge</translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Signar Missatge</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Pots signar missatges amb la teva adreça per provar que són teus. Sigues cautelòs al signar qualsevol cosa, ja que els atacs phising poden intentar confondre&apos;t per a que els hi signis amb la teva identitat. Tan sols signa als documents completament detallats amb els que hi estàs d&apos;acord.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>La adreça amb la que signat els missatges (per exemple Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Escollir una adreça de la llibreta de direccions</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alta+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Enganxar adreça del porta-retalls</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Introdueix aqui el missatge que vols signar</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Signatura</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Copiar la signatura actual al porta-retalls del sistema</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this ShardScrypt address</source> <translation>Signa el missatge per provar que ets propietari d&apos;aquesta adreça ShardScrypt</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Signar &amp;Missatge</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Neteja tots els camps de clau</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Esborrar &amp;Tot</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar el missatge</translation> 
</message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Introdueixi l&apos;adreça signant, missatge (assegura&apos;t que copies salts de línia, espais, tabuladors, etc excactament tot el text) i la signatura a sota per verificar el missatge. Per evitar ser enganyat per un atac home-entre-mig, vés amb compte de no llegir més en la signatura del que hi ha al missatge signat mateix.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>La adreça amb el que el missatge va ser signat (per exemple Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified ShardScrypt address</source> <translation>Verificar el missatge per assegurar-se que ha estat signat amb una adreça ShardScrypt específica</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>Verificar &amp;Missatge</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Neteja tots els camps de verificació de missatge</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a ShardScrypt address (e.g. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Introdueixi una adreça de ShardScrypt (per exemple Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Clica &quot;Signar Missatge&quot; per a generar una signatura</translation> </message> <message> <location line="+3"/> <source>Enter ShardScrypt signature</source> <translation>Introduïr una clau ShardScrypt</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>L&apos;adreça intoduïda és invàlida.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Siu us plau, comprovi l&apos;adreça i provi de nou.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>L&apos;adreça introduïda no referencia a cap clau.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>El desbloqueig del moneder ha estat cancelat.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>La clau privada per a la adreça introduïda no està disponible.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>El signat del missatge ha fallat.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Missatge signat.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>La signatura no s&apos;ha pogut decodificar .</translation> </message> <message> <location line="+0"/> <location 
line="+13"/> <source>Please check the signature and try again.</source> <translation>Si us plau, comprovi la signatura i provi de nou.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>La signatura no coincideix amb el resum del missatge.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Ha fallat la verificació del missatge.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Missatge verificat.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The ShardScrypt developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Obert fins %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/sense confirmar</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 confirmacions</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Estat</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, difusió a través de %n node</numerusform><numerusform>, difusió a través de %n nodes</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+7"/> <source>Source</source> 
<translation>Font</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Generat</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Des de</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>A</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>Adreça pròpia</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>etiqueta</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Crèdit</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>disponible en %n bloc més</numerusform><numerusform>disponibles en %n blocs més</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>no acceptat</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Dèbit</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Comissió de transacció</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Quantitat neta</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Missatge</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Comentar</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID de transacció</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 
blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Les monedes generades han de madurar 120 blocs abans de poder ser gastades. Quan has generat aquest bloc, aquest ha estat transmés a la xarxa per a ser afegit a la cadena de blocs. Si no arriba a ser acceptat a la cadena, el seu estat passará a &quot;no acceptat&quot; i no podrá ser gastat. Això pot ocòrrer ocasionalment si un altre node genera un bloc a pocs segons del teu.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Informació de debug</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transacció</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation>Entrades</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>cert</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>fals</translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, encara no ha estat emès correctement</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation><numerusform>Obre per %n bloc més</numerusform><numerusform>Obre per %n blocs més</numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>desconegut</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location 
filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Detall de la transacció</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Aquest panell mostra una descripció detallada de la transacció</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Tipus</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Direcció</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation><numerusform>Obre per %n bloc més</numerusform><numerusform>Obre per %n blocs més</numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Obert fins %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Sense connexió (%1 confirmacions)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Sense confirmar (%1 de %2 confirmacions)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Confirmat (%1 confirmacions)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation><numerusform>El saldo recent minat estarà disponible quan venci el termini en %n bloc més</numerusform><numerusform>El saldo recent minat estarà disponible quan venci el termini en %n blocs 
més</numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Aquest bloc no ha estat rebut per cap altre node i probablement no serà acceptat!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generat però no acceptat</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Rebut amb</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Rebut de</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Enviat a</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Pagament a un mateix</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Minat</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Estat de la transacció. 
Desplaça&apos;t per aquí sobre per mostrar el nombre de confirmacions.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Data i hora en que la transacció va ser rebuda.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Tipus de transacció.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Adreça del destinatari de la transacció.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Quantitat extreta o afegida del balanç.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Tot</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Avui</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Aquesta setmana</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Aquest mes</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>El mes passat</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Enguany</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Rang...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Rebut amb</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Enviat a</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>A tu mateix</translation> </message> <message> <location line="+1"/> <source>Mined</source> 
<translation>Minat</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Altres</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Introdueix una adreça o una etiqueta per cercar</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Quantitat mínima</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Copiar adreça </translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copiar etiqueta</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copiar quantitat</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Copiar ID de transacció</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Editar etiqueta</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Mostra detalls de la transacció</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Exportar detalls de la transacció </translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arxiu de separació per comes (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Confirmat</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Tipus</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Direcció</translation> </message> <message> <location 
line="+1"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Error en l&apos;exportació</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No s&apos;ha pogut escriure a l&apos;arxiu %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Rang:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>a</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Enviar monedes</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation>Realitzar còpia de seguretat del moneder</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Dades del moneder (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Còpia de seguretat faillida</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Hi ha hagut un error intentant desar les dades del moneder al nou directori</translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation>Copia de seguretat realitzada correctament</translation> </message> <message> <location line="+0"/> 
 <source>The wallet data was successfully saved to the new location.</source> <translation>Les dades del moneder han estat desades correctament al nou emplaçament.</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>ShardScrypt version</source> <translation>Versió de ShardScrypt</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Ús:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or litecoind</source> <translation>Enviar comanda a -servidor o litecoind</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Llista d&apos;ordres</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Obtenir ajuda per a un ordre.</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Opcions:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: shardscrypt.conf)</source> <translation>Especificar arxiu de configuració (per defecte: shardscrypt.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: litecoind.pid)</source> <translation>Especificar arxiu pid (per defecte: litecoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Especificar directori de dades</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Establir tamany de la memoria cau en megabytes (per defecte: 25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 9333 or testnet: 19333)</source> <translation>Escoltar connexions a &lt;port&gt; (per defecte: 9333 o testnet: 19333)</translation> </message> 
<message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Mantenir com a molt &lt;n&gt; connexions a peers (per defecte: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Connectar al node per obtenir les adreces de les connexions, i desconnectar</translation> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation>Especificar la teva adreça pública</translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Límit per a desconnectar connexions errònies (per defecte: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Nombre de segons abans de reconnectar amb connexions errònies (per defecte: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Ha sorgit un error al configurar el port RPC %u escoltant a IPv4: %s</translation> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 9332 or testnet: 19332)</source> <translation>Escoltar connexions JSON-RPC al port &lt;port&gt; (per defecte: 9332 o testnet:19332)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Acceptar línia d&apos;ordres i ordres JSON-RPC </translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Executar en segon pla com a programa dimoni i acceptar ordres</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Usar la xarxa de prova</translation> </message> <message> <location line="-112"/> 
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Aceptar connexions d&apos;afora (per defecte: 1 si no -proxy o -connect)</translation> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=litecoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;ShardScrypt Alert&quot; [email protected] </source> <translation>%s has de establir una contrasenya RPC a l&apos;arxiu de configuració:\n%s\nEs recomana que useu la següent constrasenya aleatòria:\nrpcuser=litecoinrpc\nrpcpassword=%s\n(no necesiteu recordar aquesta contrsenya)\nEl nom d&apos;usuari i contrasenya NO HAN de ser els mateixos.\nSi l&apos;arxiu no existeix, crea&apos;l amb els permisos d&apos;arxiu de només lectura per al propietari.\nTambé es recomana establir la notificació d&apos;alertes i així seràs notificat de les incidències;\nper exemple: alertnotify=echo %%s | mail -s &quot;ShardScrypt Alert&quot; [email protected]</translation> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Ha sorgit un error al configurar el port RPC %u escoltant a IPv6, retrocedint a IPv4: %s</translation> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Vincular a una adreça específica i sempre escoltar-hi. Utilitza la notació [host]:port per IPv6</translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. 
ShardScrypt is probably already running.</source> <translation>No es pot bloquejar el directori de dades %s. Probablement ShardScrypt ja estigui en execució.</translation> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: La transacció ha estat rebutjada. Això pot passar si alguna de les monedes del teu moneder ja s&apos;han gastat, com si haguesis usat una copia de l&apos;arxiu wallet.dat i s&apos;haguessin gastat monedes de la copia però sense marcar com gastades en aquest.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Error: Aquesta transacció requereix una comissió d&apos;almenys %s degut al seu import, complexitat o per l&apos;ús de fons recentment rebuts!</translation> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation>Executar ordre al rebre una alerta rellevant (%s al cmd es reemplaça per message)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Executar una ordre quan una transacció del moneder canviï (%s in cmd es canvia per TxID)</translation> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Establir una mida màxima de transaccions d&apos;alta prioritat/baixa comisió en bytes (per defecte: 27000)</translation> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own 
risk - do not use for mining or merchant applications</source> <translation>Aquesta és una versió de pre-llançament - utilitza-la sota la teva responsabilitat - No usar per a minería o aplicacions de compra-venda</translation> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Advertència: el -paytxfee és molt elevat! Aquesta és la comissió de transacció que pagaràs quan enviis una transacció.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Advertència: Les transaccions mostrades poden no ser correctes! Pot esser que necessitis actualitzar, o bé que altres nodes ho necessitin.</translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong ShardScrypt will not work properly.</source> <translation>Advertència: Si us plau comprovi que la data i hora del seu computador siguin correctes! Si el seu rellotge està mal configurat, ShardScrypt no funcionará de manera apropiada.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Advertència: Error llegint l&apos;arxiu wallet.dat!! Totes les claus es llegeixen correctament, però hi ha dades de transaccions o entrades del llibre d&apos;adreces absents o bé son incorrectes.</translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! 
Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Advertència: L&apos;arxiu wallet.dat és corrupte, dades rescatades! L&apos;arxiu wallet.dat original ha estat desat com wallet.{estampa_temporal}.bak al directori %s; si el teu balanç o transaccions son incorrectes hauries de restaurar-lo de un backup.</translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Intentar recuperar les claus privades d&apos;un arxiu wallet.dat corrupte</translation> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation>Opcions de la creació de blocs:</translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Connectar només al(s) node(s) especificats</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation>S&apos;ha detectat una base de dades de blocs corrupta</translation> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Descobrir la pròpia adreça IP (per defecte: 1 quan escoltant i no -externalip)</translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation>Vols reconstruir la base de dades de blocs ara?</translation> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation>Error carregant la base de dades de blocs</translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation>Error inicialitzant l&apos;entorn de la base de dades del moneder %s!</translation> </message> <message> <location line="+1"/> <source>Error loading block 
database</source> <translation>Error carregant la base de dades del bloc</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation>Error obrint la base de dades de blocs</translation> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Error: Espai al disc baix!</translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Error: El moneder està blocat, no és possible crear la transacció!</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Error: error de sistema:</translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Error al escoltar a qualsevol port. Utilitza -listen=0 si vols això.</translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation>Ha fallat la lectura de la informació del bloc</translation> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation>Ha fallat la lectura del bloc</translation> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation>Ha fallat la sincronització de l&apos;índex de bloc</translation> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation>Ha fallat la escriptura de l&apos;índex de blocs</translation> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation>Ha fallat la escriptura de la informació de bloc</translation> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation>Ha fallat l&apos;escriptura del bloc</translation> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation>Ha fallat 
l&apos;escriptura de l&apos;arxiu info</translation> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation>Ha fallat l&apos;escriptura de la basse de dades de monedes</translation> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation>Ha fallat l&apos;escriptura de l&apos;índex de transaccions</translation> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation>Ha fallat el desfer de dades</translation> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation>Cerca punts de connexió usant rastreig de DNS (per defecte: 1 tret d&apos;usar -connect)</translation> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Quants blocs s&apos;han de confirmar a l&apos;inici (per defecte: 288, 0 = tots)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation>Com verificar el bloc (0-4, per defecte 3)</translation> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Reconstruir l&apos;índex de la cadena de blocs dels arxius actuals blk000??.dat</translation> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation>Estableix el nombre de fils per atendre trucades RPC (per defecte: 4)</translation> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> 
<translation>Verificant blocs...</translation> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation>Verificant moneder...</translation> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Importa blocs de un fitxer blk000??.dat extern</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation>&amp;Informació</translation> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Adreça -tor invàlida: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation>Mantenir tot l&apos;índex de transaccions (per defecte: 0)</translation> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Mida màxima del buffer de recepció per a cada connexió, &lt;n&gt;*1000 bytes (default: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Mida màxima del buffer d&apos;enviament per a cada connexió, &lt;n&gt;*1000 bytes (default: 5000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> 
<translation>Tan sols acceptar cadenes de blocs que coincideixin amb els punts de prova (per defecte: 1)</translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Només connectar als nodes de la xarxa &lt;net&gt; (IPv4, IPv6 o Tor)</translation> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Sortida de la informació extra de debugging. Implica totes les demés opcions -debug*</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Sortida de la informació extra de debugging de xarxa.</translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Anteposar estampa temporal a les dades de debug</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the ShardScrypt Wiki for SSL setup instructions)</source> <translation>Opcions SSL: (veure la Wiki de ShardScrypt per a instruccions de configuració SSL)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Selecciona la versió de socks proxy a utilitzar (4-5, per defecte: 5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Enviar informació de traça/debug a la consola en comptes del arxiu debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Enviar informació de traça/debug a un debugger</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Establir una mida màxima de bloc en bytes (per defecte: 250000)</translation> </message> 
<message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Establir una mida mínima de bloc en bytes (per defecte: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Reduir l&apos;arxiu debug.log al iniciar el client (per defecte 1 quan no -debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Especificar el temps limit per a un intent de connexió en milisegons (per defecte: 5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation>Error de sistema:</translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Utilitza UPnP per a mapejar els ports d&apos;escolta (per defecte: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Utilitza UPnP per a mapejar els ports d&apos;escolta (per defecte: 1 quan s&apos;escolta)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Utilitzar proxy per arribar als serveis tor amagats (per defecte: el mateix que -proxy)</translation> </message> <message> <location line="+2"/> 
<source>Username for JSON-RPC connections</source> <translation>Nom d&apos;usuari per a connexions JSON-RPC</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation>Avís</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Advertència: Aquetsa versió està obsoleta, és necessari actualitzar!</translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation>Necessiteu reconstruir les bases de dades usant -reindex per canviar -txindex</translation> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation>L&apos;arxiu wallet.data és corrupte, el rescat de les dades ha fallat</translation> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Contrasenya per a connexions JSON-RPC</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Permetre connexions JSON-RPC d&apos;adreces IP específiques</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Enviar ordre al node en execució a &lt;ip&gt; (per defecte: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Executar orde quan el millor bloc canviï (%s al cmd es reemplaça per un bloc de hash)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Actualitzar moneder a l&apos;últim format</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Establir límit de 
nombre de claus a &lt;n&gt; (per defecte: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Re-escanejar cadena de blocs en cerca de transaccions de moneder perdudes</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Utilitzar OpenSSL (https) per a connexions JSON-RPC</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Arxiu del certificat de servidor (per defecte: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Clau privada del servidor (per defecte: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Xifrats acceptats (per defecte: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Aquest misatge d&apos;ajuda</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Impossible d&apos;unir %s a aquest ordinador (s&apos;ha retornat l&apos;error %d, %s)</translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Connectar a través de socks proxy</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Permetre consultes DNS per a -addnode, -seednode i -connect</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Carregant adreces...</translation> </message> <message> <location line="-35"/> 
<source>Error loading wallet.dat: Wallet corrupted</source> <translation>Error carregant wallet.dat: Moneder corrupte</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of ShardScrypt</source> <translation>Error carregant wallet.dat: El moneder requereix una versió de ShardScrypt més moderna</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart ShardScrypt to complete</source> <translation>El moneder necesita ser re-escrit: re-inicia ShardScrypt per a completar la tasca</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>Error carregant wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Adreça -proxy invalida: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Xarxa desconeguda especificada a -onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>S&apos;ha demanat una versió desconeguda de -socks proxy: %i</translation> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>No es pot resoldre l&apos;adreça -bind: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>No es pot resoldre l&apos;adreça -externalip: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Quantitat invalida per a -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> 
<translation>Quanitat invalida</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Balanç insuficient</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Carregant índex de blocs...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Afegir un node per a connectar&apos;s-hi i intentar mantenir la connexió oberta</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. ShardScrypt is probably already running.</source> <translation>Impossible d&apos;unir %s en aquest ordinador. Probablement ShardScrypt ja estigui en execució.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Comisió a afegir per cada KB de transaccions que enviïs</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Carregant moneder...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>No es pot reduir la versió del moneder</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>No es pot escriure l&apos;adreça per defecte</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Re-escanejant...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Càrrega acabada</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation>Utilitza la opció %s</translation> </message> <message> <location line="-74"/> <source>Error</source> <translation>Error</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the 
configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Has de configurar el rpcpassword=&lt;password&gt; a l&apos;arxiu de configuració:\n %s\n Si l&apos;arxiu no existeix, crea&apos;l amb els permís owner-readable-only.</translation> </message> </context> </TS><|fim▁end|>
<message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<|file_name|>view_major.py<|end_file_name|><|fim▁begin|>from flask import * from playhouse.flask_utils import * import string from app import app from model import Major, Minor, Store, Transaction, Item @app.route('/major', methods=['GET', 'POST']) def major_list(): query = Major \ .select(Major, Minor) \ .join(Minor, on=(Major.id == Minor.major).alias('minor')) \ .order_by(Major.id) last = None minors = [] majors = [] for major in query: minor = { 'id': major.minor.id, 'name': major.minor.name } if last != None and major.id != last.id: majors.append({'id': last.id, 'income': last.income, 'name': last.name, 'minors': minors}) minors = [minor] else: minors.append(minor) last = major if last != None: majors.append({'id': last.id, 'income': last.income, 'name': last.name, 'minors': minors}) return render_template('major.html', majors=majors) @app.route('/major/add', methods=['GET', 'POST']) def major_add(): if request.method == 'POST': if request.form.get('major_id'): major = get_object_or_404(Major, Major.id == request.form['major_id']) minors = Minor.listWithStats(request.form['major_id']) major.name = request.form['name'] major.income = bool(request.form.get('income')) major.save() flash('Category #%d updated successfully.' 
% major.id, 'success') else: major = Major.create(name=request.form['name'], income=bool(request.form.get('income'))) minors = [] for minor_name in string.split(request.form['minors'], ','): if len(minor_name) > 0: minor = Minor.create(name=string.strip(minor_name), major=major) minors.append(minor) flash('A category created successfully.', 'success') return render_template('major.html', major=major, minors=minors) return render_template('major.html') @app.route('/major/<int:id>', methods=['GET', 'POST']) def major_detail(id): major = get_object_or_404(Major, Major.id == id) minors = Minor.listWithStats(id) num_items = 0 for minor in minors: num_items += minor.count return render_template('major.html', major=major, minors=minors, num_items=num_items) @app.route('/major/delete/<int:id>', methods=['GET', 'POST']) def major_delete(id): major = get_object_or_404(Major, Major.id == id) major.delete_instance() minors = Minor.delete().where(Minor.major == id).execute() flash('Category #%d is deleted.' % id, 'success') return jsonify(success=True) @app.route('/_minor/add', methods=['POST']) def minor_add(): try: major_id = request.form['major_id'] major = get_object_or_404(Major, Major.id == major_id) minor = Minor.create(name=request.form['name'], major=major) except: flash('Category #%d not found.' 
% major_id, 'danger') return jsonify(success=False) flash('A new subcategory is added.', 'success') return jsonify(success=True) @app.route('/_minor/delete/<int:id>', methods=['GET']) def minor_delete(id): try: minor = get_object_or_404(Minor, Minor.id == id) minor.delete_instance() except: return jsonify(success=False) return jsonify(success=True) @app.route('/minor/<int:id>', methods=['GET']) def minor_detail(id): minor = get_object_or_404(Minor, Minor.id == id)<|fim▁hole|> majors = Major.select().order_by(Major.id) return render_template('minor.html', minor=minor, majors=majors) @app.route('/_minor/edit/<int:id>', methods=['POST']) def minor_edit(id): try: minor = Minor.get(Minor.id == id) minor.name = request.form['name'] minor.major = request.form['major_id'] minor.save() except: return jsonify(success=False) return jsonify(success=True)<|fim▁end|>
<|file_name|>time-until-element.ts<|end_file_name|><|fim▁begin|>import RelativeTime from './relative-time' import RelativeTimeElement from './relative-time-element' import {localeFromElement} from './utils' export default class TimeUntilElement extends RelativeTimeElement { getFormattedDate(): string | undefined { const format = this.getAttribute('format') const date = this.date if (!date) return if (format === 'micro') { return new RelativeTime(date, localeFromElement(this)).microTimeUntil() } else { return new RelativeTime(date, localeFromElement(this)).timeUntil() } } } if (!window.customElements.get('time-until')) { window.TimeUntilElement = TimeUntilElement window.customElements.define('time-until', TimeUntilElement) } declare global { interface Window { TimeUntilElement: typeof TimeUntilElement } interface HTMLElementTagNameMap {<|fim▁hole|><|fim▁end|>
'time-until': TimeUntilElement } }
<|file_name|>input-base.js<|end_file_name|><|fim▁begin|>var __extends = (this && this.__extends) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; (function (factory) { if (typeof module === 'object' && typeof module.exports === 'object') { var v = factory(require, exports); if (v !== undefined) module.exports = v; } else if (typeof define === 'function' && define.amd) { define(["require", "exports", '@angular/core', '@angular/forms', '../app/app', '../../util/dom', '../../config/config', '../content/content', '../../util/dom-controller', '../../util/form', '../ion', '../../util/util', '../item/item', '../../navigation/nav-controller', '../../platform/platform'], factory); } })(function (require, exports) { "use strict"; var core_1 = require('@angular/core'); var forms_1 = require('@angular/forms'); var app_1 = require('../app/app'); var dom_1 = require('../../util/dom'); var config_1 = require('../../config/config'); var content_1 = require('../content/content'); var dom_controller_1 = require('../../util/dom-controller'); var form_1 = require('../../util/form'); var ion_1 = require('../ion'); var util_1 = require('../../util/util'); var item_1 = require('../item/item'); var nav_controller_1 = require('../../navigation/nav-controller'); var platform_1 = require('../../platform/platform'); /** * @private * Hopefully someday a majority of the auto-scrolling tricks can get removed. 
*/ var InputBase = (function (_super) { __extends(InputBase, _super); function InputBase(config, _form, _item, _app, _platform, elementRef, renderer, _content, nav, ngControl, _dom) { var _this = this; _super.call(this, config, elementRef, renderer, 'input'); this._form = _form; this._item = _item; this._app = _app; this._platform = _platform; this._content = _content; this._dom = _dom; this._disabled = false; this._type = 'text'; this._value = ''; this._nav = nav; this._useAssist = config.getBoolean('scrollAssist', false); this._usePadding = config.getBoolean('scrollPadding', this._useAssist); this._keyboardHeight = config.getNumber('keyboardHeight'); this._autoFocusAssist = config.get('autoFocusAssist', 'delay'); this._autoComplete = config.get('autocomplete', 'off'); this._autoCorrect = config.get('autocorrect', 'off'); if (ngControl) { ngControl.valueAccessor = this; this.inputControl = ngControl; } _form.register(this); // only listen to content scroll events if there is content if (_content) { this._scrollStart = _content.ionScrollStart.subscribe(function (ev) { _this.scrollHideFocus(ev, true); }); this._scrollEnd = _content.ionScrollEnd.subscribe(function (ev) { _this.scrollHideFocus(ev, false); }); } } InputBase.prototype.scrollHideFocus = function (ev, shouldHideFocus) { var _this = this; // do not continue if there's no nav, or it's transitioning if (!this._nav) return; // DOM READ: check if this input has focus if (this.hasFocus()) { // if it does have focus, then do the dom write this._dom.write(function () { _this._native.hideFocus(shouldHideFocus); }); } }; InputBase.prototype.setItemInputControlCss = function () { var item = this._item; var nativeInput = this._native; var inputControl = this.inputControl; // Set the control classes on the item if (item && inputControl) { this.setControlCss(item, inputControl); } // Set the control classes on the native input if (nativeInput && inputControl) { this.setControlCss(nativeInput, inputControl); } }; 
InputBase.prototype.setControlCss = function (element, control) { element.setElementClass('ng-untouched', control.untouched); element.setElementClass('ng-touched', control.touched); element.setElementClass('ng-pristine', control.pristine); element.setElementClass('ng-dirty', control.dirty); element.setElementClass('ng-valid', control.valid); element.setElementClass('ng-invalid', !control.valid); }; InputBase.prototype.setValue = function (val) { this._value = val; this.checkHasValue(val); }; InputBase.prototype.setType = function (val) { this._type = 'text'; if (val) { val = val.toLowerCase(); if (/password|email|number|search|tel|url|date|month|time|week/.test(val)) { this._type = val; } } }; InputBase.prototype.setDisabled = function (val) { this._disabled = util_1.isTrueProperty(val); this._item && this._item.setElementClass('item-input-disabled', this._disabled); this._native && this._native.isDisabled(this._disabled); }; InputBase.prototype.setClearOnEdit = function (val) { this._clearOnEdit = util_1.isTrueProperty(val); }; /** * Check if we need to clear the text input if clearOnEdit is enabled * @private */ InputBase.prototype.checkClearOnEdit = function (inputValue) { if (!this._clearOnEdit) { return; } // Did the input value change after it was blurred and edited? 
if (this._didBlurAfterEdit && this.hasValue()) { // Clear the input this.clearTextInput(); } // Reset the flag this._didBlurAfterEdit = false; }; /** * Overriden in child input * @private */ InputBase.prototype.clearTextInput = function () { }; /** * @private */ InputBase.prototype.setNativeInput = function (nativeInput) { var _this = this; this._native = nativeInput; if (this._item && this._item.labelId !== null) { nativeInput.labelledBy(this._item.labelId); } nativeInput.valueChange.subscribe(function (inputValue) { _this.onChange(inputValue); }); nativeInput.keydown.subscribe(function (inputValue) { _this.onKeydown(inputValue); }); this.focusChange(this.hasFocus()); nativeInput.focusChange.subscribe(function (textInputHasFocus) { _this.focusChange(textInputHasFocus); _this.checkHasValue(nativeInput.getValue()); if (!textInputHasFocus) { _this.onTouched(textInputHasFocus); } }); this.checkHasValue(nativeInput.getValue()); this.setDisabled(this._disabled); var ionInputEle = this._elementRef.nativeElement; var nativeInputEle = nativeInput.element(); // copy ion-input attributes to the native input element dom_1.copyInputAttributes(ionInputEle, nativeInputEle); if (ionInputEle.hasAttribute('autofocus')) { // the ion-input element has the autofocus attributes ionInputEle.removeAttribute('autofocus'); if (this._autoFocusAssist === 'immediate') { // config says to immediate focus on the input // works best on android devices nativeInputEle.focus(); } else if (this._autoFocusAssist === 'delay') { // config says to chill out a bit and focus on the input after transitions // works best on desktop setTimeout(function () { nativeInputEle.focus(); }, 650); } } // by default set autocomplete="off" unless specified by the input if (ionInputEle.hasAttribute('autocomplete')) { this._autoComplete = ionInputEle.getAttribute('autocomplete'); } nativeInputEle.setAttribute('autocomplete', this._autoComplete); // by default set autocorrect="off" unless specified by the input if 
(ionInputEle.hasAttribute('autocorrect')) { this._autoCorrect = ionInputEle.getAttribute('autocorrect'); } nativeInputEle.setAttribute('autocorrect', this._autoCorrect); }; /** * @private */ InputBase.prototype.setNextInput = function (nextInput) { var _this = this; if (nextInput) { nextInput.focused.subscribe(function () { _this._form.tabFocus(_this); }); } }; /** * @private * Angular2 Forms API method called by the model (Control) on change to update * the checked value. * https://github.com/angular/angular/blob/master/modules/angular2/src/forms/directives/shared.ts#L34 */ InputBase.prototype.writeValue = function (val) { this._value = val; this.checkHasValue(val); }; /** * @private */ InputBase.prototype.onChange = function (val) { this.checkHasValue(val); }; /** * onKeydown handler for clearOnEdit * @private */ InputBase.prototype.onKeydown = function (val) { if (this._clearOnEdit) { this.checkClearOnEdit(val); } }; /** * @private */ InputBase.prototype.onTouched = function (val) { }; /** * @private */ InputBase.prototype.hasFocus = function () { // check if an input has focus or not return this._native.hasFocus(); }; /** * @private */ InputBase.prototype.hasValue = function () { var inputValue = this._value; return (inputValue !== null && inputValue !== undefined && inputValue !== ''); }; /** * @private */ InputBase.prototype.checkHasValue = function (inputValue) { if (this._item) { var hasValue = (inputValue !== null && inputValue !== undefined && inputValue !== ''); this._item.setElementClass('input-has-value', hasValue); } }; /** * @private */ InputBase.prototype.focusChange = function (inputHasFocus) { if (this._item) { (void 0) /* console.debug */; this._item.setElementClass('input-has-focus', inputHasFocus); } // If clearOnEdit is enabled and the input blurred but has a value, set a flag if (this._clearOnEdit && !inputHasFocus && this.hasValue()) { this._didBlurAfterEdit = true; } }; InputBase.prototype.pointerStart = function (ev) { // input cover 
touchstart if (ev.type === 'touchstart') { this._isTouch = true; } if ((this._isTouch || (!this._isTouch && ev.type === 'mousedown')) && this._app.isEnabled()) { // remember where the touchstart/mousedown started this._coord = dom_1.pointerCoord(ev); } (void 0) /* console.debug */; }; InputBase.prototype.pointerEnd = function (ev) { // input cover touchend/mouseup (void 0) /* console.debug */; if ((this._isTouch && ev.type === 'mouseup') || !this._app.isEnabled()) { // the app is actively doing something right now // don't try to scroll in the input ev.preventDefault(); ev.stopPropagation(); } else if (this._coord) { // get where the touchend/mouseup ended var endCoord = dom_1.pointerCoord(ev); // focus this input if the pointer hasn't moved XX pixels // and the input doesn't already have focus if (!dom_1.hasPointerMoved(8, this._coord, endCoord) && !this.hasFocus()) { ev.preventDefault(); ev.stopPropagation(); // begin the input focus process this.initFocus(); } } this._coord = null; }; /** * @private */ InputBase.prototype.initFocus = function () { var _this = this; // begin the process of setting focus to the inner input element var content = this._content; (void 0) /* console.debug */; if (content) { // this input is inside of a scroll view // find out if text input should be manually scrolled into view // get container of this input, probably an ion-item a few nodes up var ele = this._elementRef.nativeElement; ele = ele.closest('ion-item,[ion-item]') || ele; var scrollData = getScrollData(ele.offsetTop, ele.offsetHeight, content.getContentDimensions(), this._keyboardHeight, this._platform.height()); if (Math.abs(scrollData.scrollAmount) < 4) { // the text input is in a safe position that doesn't // require it to be scrolled into view, just set focus now this.setFocus(); // all good, allow clicks again this._app.setEnabled(true); this._nav && this._nav.setTransitioning(false); if (this._usePadding) { content.clearScrollPaddingFocusOut(); } return; } if 
(this._usePadding) { // add padding to the bottom of the scroll view (if needed) content.addScrollPadding(scrollData.scrollPadding); } // manually scroll the text input to the top // do not allow any clicks while it's scrolling var scrollDuration = getScrollAssistDuration(scrollData.scrollAmount); this._app.setEnabled(false, scrollDuration); this._nav && this._nav.setTransitioning(true); // temporarily move the focus to the focus holder so the browser // doesn't freak out while it's trying to get the input in place // at this point the native text input still does not have focus this._native.beginFocus(true, scrollData.inputSafeY); // scroll the input into place content.scrollTo(0, scrollData.scrollTo, scrollDuration, function () { (void 0) /* console.debug */; // the scroll view is in the correct position now // give the native text input focus _this._native.beginFocus(false, 0); // ensure this is the focused input _this.setFocus(); // all good, allow clicks again _this._app.setEnabled(true); _this._nav && _this._nav.setTransitioning(false); if (_this._usePadding) { content.clearScrollPaddingFocusOut(); } }); } else { // not inside of a scroll view, just focus it this.setFocus(); } }; /** * @private */ InputBase.prototype.setFocus = function () { // immediately set focus this._form.setAsFocused(this); // set focus on the actual input element (void 0) /* console.debug */; this._native.setFocus(); // ensure the body hasn't scrolled down document.body.scrollTop = 0; }; /** * @private * Angular2 Forms API method called by the view (formControlName) to register the * onChange event handler that updates the model (Control). * @param {Function} fn the onChange event handler. */ InputBase.prototype.registerOnChange = function (fn) { this.onChange = fn; }; /** * @private * Angular2 Forms API method called by the view (formControlName) to register * the onTouched event handler that marks model (Control) as touched. * @param {Function} fn onTouched event handler. 
*/ InputBase.prototype.registerOnTouched = function (fn) { this.onTouched = fn; }; InputBase.prototype.focusNext = function () { this._form.tabFocus(this); }; /** @nocollapse */ InputBase.ctorParameters = [<|fim▁hole|> { type: platform_1.Platform, }, { type: core_1.ElementRef, }, { type: core_1.Renderer, }, { type: content_1.Content, decorators: [{ type: core_1.Optional },] }, { type: nav_controller_1.NavController, }, { type: forms_1.NgControl, }, { type: dom_controller_1.DomController, }, ]; return InputBase; }(ion_1.Ion)); exports.InputBase = InputBase; /** * @private */ function getScrollData(inputOffsetTop, inputOffsetHeight, scrollViewDimensions, keyboardHeight, plaformHeight) { // compute input's Y values relative to the body var inputTop = (inputOffsetTop + scrollViewDimensions.contentTop - scrollViewDimensions.scrollTop); var inputBottom = (inputTop + inputOffsetHeight); // compute the safe area which is the viewable content area when the soft keyboard is up var safeAreaTop = scrollViewDimensions.contentTop; var safeAreaHeight = (plaformHeight - keyboardHeight - safeAreaTop) / 2; var safeAreaBottom = safeAreaTop + safeAreaHeight; // figure out if each edge of teh input is within the safe area var inputTopWithinSafeArea = (inputTop >= safeAreaTop && inputTop <= safeAreaBottom); var inputTopAboveSafeArea = (inputTop < safeAreaTop); var inputTopBelowSafeArea = (inputTop > safeAreaBottom); var inputBottomWithinSafeArea = (inputBottom >= safeAreaTop && inputBottom <= safeAreaBottom); var inputBottomBelowSafeArea = (inputBottom > safeAreaBottom); /* Text Input Scroll To Scenarios --------------------------------------- 1) Input top within safe area, bottom within safe area 2) Input top within safe area, bottom below safe area, room to scroll 3) Input top above safe area, bottom within safe area, room to scroll 4) Input top below safe area, no room to scroll, input smaller than safe area 5) Input top within safe area, bottom below safe area, no room to scroll, 
input smaller than safe area 6) Input top within safe area, bottom below safe area, no room to scroll, input larger than safe area 7) Input top below safe area, no room to scroll, input larger than safe area */ var scrollData = { scrollAmount: 0, scrollTo: 0, scrollPadding: 0, inputSafeY: 0 }; if (inputTopWithinSafeArea && inputBottomWithinSafeArea) { // Input top within safe area, bottom within safe area // no need to scroll to a position, it's good as-is return scrollData; } // looks like we'll have to do some auto-scrolling if (inputTopBelowSafeArea || inputBottomBelowSafeArea || inputTopAboveSafeArea) { // Input top or bottom below safe area // auto scroll the input up so at least the top of it shows if (safeAreaHeight > inputOffsetHeight) { // safe area height is taller than the input height, so we // can bring up the input just enough to show the input bottom scrollData.scrollAmount = Math.round(safeAreaBottom - inputBottom); } else { // safe area height is smaller than the input height, so we can // only scroll it up so the input top is at the top of the safe area // however the input bottom will be below the safe area scrollData.scrollAmount = Math.round(safeAreaTop - inputTop); } scrollData.inputSafeY = -(inputTop - safeAreaTop) + 4; if (inputTopAboveSafeArea && scrollData.scrollAmount > inputOffsetHeight) { // the input top is above the safe area and we're already scrolling it into place // don't let it scroll more than the height of the input scrollData.scrollAmount = inputOffsetHeight; } } // figure out where it should scroll to for the best position to the input scrollData.scrollTo = (scrollViewDimensions.scrollTop - scrollData.scrollAmount); // when auto-scrolling, there also needs to be enough // content padding at the bottom of the scroll view // always add scroll padding when a text input has focus // this allows for the content to scroll above of the keyboard // content behind the keyboard would be blank // some cases may not need it, but when 
jumping around it's best // to have the padding already rendered so there's no jank scrollData.scrollPadding = keyboardHeight; // var safeAreaEle: HTMLElement = (<any>window).safeAreaEle; // if (!safeAreaEle) { // safeAreaEle = (<any>window).safeAreaEle = document.createElement('div'); // safeAreaEle.style.cssText = 'position:absolute; padding:1px 5px; left:0; right:0; font-weight:bold; font-size:10px; font-family:Courier; text-align:right; background:rgba(0, 128, 0, 0.8); text-shadow:1px 1px white; pointer-events:none;'; // document.body.appendChild(safeAreaEle); // } // safeAreaEle.style.top = safeAreaTop + 'px'; // safeAreaEle.style.height = safeAreaHeight + 'px'; // safeAreaEle.innerHTML = ` // <div>scrollTo: ${scrollData.scrollTo}</div> // <div>scrollAmount: ${scrollData.scrollAmount}</div> // <div>scrollPadding: ${scrollData.scrollPadding}</div> // <div>inputSafeY: ${scrollData.inputSafeY}</div> // <div>scrollHeight: ${scrollViewDimensions.scrollHeight}</div> // <div>scrollTop: ${scrollViewDimensions.scrollTop}</div> // <div>contentHeight: ${scrollViewDimensions.contentHeight}</div> // <div>plaformHeight: ${plaformHeight}</div> // `; return scrollData; } exports.getScrollData = getScrollData; var SCROLL_ASSIST_SPEED = 0.3; function getScrollAssistDuration(distanceToScroll) { distanceToScroll = Math.abs(distanceToScroll); var duration = distanceToScroll / SCROLL_ASSIST_SPEED; return Math.min(400, Math.max(150, duration)); } }); //# sourceMappingURL=input-base.js.map<|fim▁end|>
{ type: config_1.Config, }, { type: form_1.Form, }, { type: item_1.Item, }, { type: app_1.App, },
<|file_name|>stoplossorder.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from .baserequest import BaseRequest from oandapyV20.types import TradeID, PriceValue from oandapyV20.definitions.orders import TimeInForce, OrderType class StopLossOrderRequest(BaseRequest): """create a StopLossOrderRequest. StopLossOrderRequest is used to build the body for a StopLossOrder. The body can be used to pass to the OrderCreate endpoint. """ def __init__(self, tradeID, price, clientTradeID=None, timeInForce=TimeInForce.GTC, gtdTime=None, clientExtensions=None): """ Instantiate a StopLossOrderRequest. Parameters ---------- tradeID : string (required) the tradeID of an existing trade price : float (required) the treshold price indicating the price to close the order Example ------- >>> import json >>> from oandapyV20 import API >>> import oandapyV20.endpoints.orders as orders >>> from oandapyV20.contrib.requests import StopLossOrderRequest >>> >>> accountID = "..." >>> client = API(access_token=...)<|fim▁hole|> >>> print(json.dumps(ordr.data, indent=4)) { "order": { "type": "STOP_LOSS", "tradeID": "1234", "price": "1.07000", "timeInForce": "GTC", } } >>> # now we have the order specification, create the order request >>> r = orders.OrderCreate(accountID, data=ordr.data) >>> # perform the request >>> rv = client.request(r) >>> print(json.dumps(rv, indent=4)) >>> ... 
""" super(StopLossOrderRequest, self).__init__() # allowed: GTC/GFD/GTD if timeInForce not in [TimeInForce.GTC, TimeInForce.GTD, TimeInForce.GFD]: raise ValueError("timeInForce: {}".format(timeInForce)) # by default for a STOP_LOSS order self._data.update({"type": OrderType.STOP_LOSS}) # required self._data.update({"tradeID": TradeID(tradeID).value}) self._data.update({"price": PriceValue(price).value}) # optional self._data.update({"clientExtensions": clientExtensions}) self._data.update({"timeInForce": timeInForce}) self._data.update({"gtdTime": gtdTime}) if timeInForce == TimeInForce.GTD and not gtdTime: raise ValueError("gtdTime missing") @property def data(self): """data property. return the JSON body. """ return dict({"order": super(StopLossOrderRequest, self).data})<|fim▁end|>
>>> ordr = StopLossOrderRequest(tradeID="1234", price=1.07)
<|file_name|>top.py<|end_file_name|><|fim▁begin|>import os, sys from datetime import datetime from typing import List import boto3 import botocore.exceptions from . import register_parser from .util import ThreadPoolExecutor from .util.printing import format_table, page_output def get_stats_for_region(region): try: session = boto3.Session(region_name=region) num_instances = len(list(session.resource("ec2").instances.all())) num_amis = len(list(session.resource("ec2").images.filter(Owners=["self"]))) num_vpcs = len(list(session.resource("ec2").vpcs.all())) num_enis = len(list(session.resource("ec2").network_interfaces.all())) num_volumes = len(list(session.resource("ec2").volumes.all())) except botocore.exceptions.ClientError: num_instances, num_amis, num_vpcs, num_enis, num_volumes = ["Access denied"] * 5 # type: ignore return [region, num_instances, num_amis, num_vpcs, num_enis, num_volumes] <|fim▁hole|> table = list(executor.map(get_stats_for_region, boto3.Session().get_available_regions("ec2"))) page_output(format_table(table, column_names=columns, max_col_width=args.max_col_width)) parser = register_parser(top, help='Show an overview of AWS resources per region')<|fim▁end|>
def top(args): table = [] # type: List[List] columns = ["Region", "Instances", "AMIs", "VPCs", "Network interfaces", "EBS volumes"] executor = ThreadPoolExecutor()
<|file_name|>Globals.cpp<|end_file_name|><|fim▁begin|>//////////////////////////////////////////////////////////////////////////////////////////////////////// // Part of Injectable Generic Camera System // Copyright(c) 2017, Frans Bouma // All rights reserved. // https://github.com/FransBouma/InjectableGenericCameraSystem // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met : // // * Redistributions of source code must retain the above copyright notice, this<|fim▁hole|>// this list of conditions and the following disclaimer in the documentation // and / or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. //////////////////////////////////////////////////////////////////////////////////////////////////////// #include "stdafx.h" #include "Globals.h" #include "GameConstants.h" //-------------------------------------------------------------------------------------------------------------------------------- // data shared with asm functions. This is allocated here, 'C' style and not in some datastructure as passing that to // MASM is rather tedious. 
extern "C" { uint8_t g_cameraEnabled = 0; float g_fovValue = DEFAULT_FOV_DEGREES; } namespace IGCS { Globals::Globals() { } Globals::~Globals() { } Globals &Globals::instance() { static Globals theInstance; return theInstance; } }<|fim▁end|>
// list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice,
<|file_name|>login-new.component.spec.ts<|end_file_name|><|fim▁begin|>import { async, ComponentFixture, TestBed } from '@angular/core/testing'; import { LoginNewComponent } from './login-new.component'; describe('LoginNewComponent', () => { let component: LoginNewComponent; let fixture: ComponentFixture<LoginNewComponent>; beforeEach(async(() => { TestBed.configureTestingModule({ declarations: [ LoginNewComponent ] }) .compileComponents(); })); beforeEach(() => { fixture = TestBed.createComponent(LoginNewComponent); component = fixture.componentInstance; fixture.detectChanges(); });<|fim▁hole|>});<|fim▁end|>
it('should create', () => { expect(component).toBeTruthy(); });
<|file_name|>tsv-convert.py<|end_file_name|><|fim▁begin|>#!bin/python # TSV to Dublin Core/McMaster Repository conversion tool # Matt McCollow <[email protected]>, 2011 # Nick Ruest <[email protected]>, 2011 from DublinCore import DublinCore import csv from sys import argv from xml.dom.minidom import Document from os.path import basename DC_NS = 'http://purl.org/dc/elements/1.1/' XSI_NS = 'http://www.w3.org/2001/XMLSchema-instance' MACREPO_NS = 'http://repository.mcmaster.ca/schema/macrepo/elements/1.0/' class TabFile(object): """ A dialect for the csv.DictReader constructor """ delimiter = '\t' def parse(fn): """ Parse a TSV file """ try: fp = open(fn) fields = fp.readline().rstrip('\n').split('\t') tsv = csv.DictReader(fp, fieldnames=fields, dialect=TabFile) for row in tsv: dc = makedc(row) writefile(row['dc:identifier'], dc) xml = makexml(row) writefile(row['dc:identifier'], xml) except IOError as (errno, strerror): print "Error ({0}): {1}".format(errno, strerror) raise SystemExit fp.close() def makedc(row): """ Generate a Dublin Core XML file from a TSV """ metadata = DublinCore() metadata.Contributor = row.get('dc:contributor', '') metadata.Coverage = row.get('dc:coverage', '') metadata.Creator = row.get('dc:creator', '') metadata.Date = row.get('dc:date', '') metadata.Description = row.get('dc:description', '') metadata.Format = row.get('dc:format', '') metadata.Identifier = row.get('dc:identifier', '') metadata.Language = row.get('dc:language', '') metadata.Publisher = row.get('dc:publisher', '') metadata.Relation = row.get('dc:relation', '').split('|') metadata.Rights = row.get('dc:rights', '') metadata.Source = row.get('dc:source', '') metadata.Subject = row.get('dc:subject', '') metadata.Title = row.get('dc:title', '') return metadata def makexml(row): """ Generate an XML file conforming to the macrepo schema from a TSV """ doc = Document() root = doc.createElement('metadata') root.setAttribute('xmlns:xsi', XSI_NS) root.setAttribute('xmlns:macrepo', 
MACREPO_NS)<|fim▁hole|> notes = doc.createElement('macrepo:notes') notes.appendChild(doc.createTextNode(row.get('macrepo:notes', ''))) root.appendChild(notes) scale = doc.createElement('macrepo:scale') scale.appendChild(doc.createTextNode(row.get('macrepo:scale', ''))) root.appendChild(scale) return doc def writefile(name, obj): """ Writes Dublin Core or Macrepo XML object to a file """ if isinstance(obj, DublinCore): fp = open(name + '-DC.xml', 'w') fp.write(obj.makeXML(DC_NS)) elif isinstance(obj, Document): fp = open(name + '-macrepo.xml', 'w') fp.write(obj.toprettyxml()) fp.close() def chkarg(arg): """ Was a TSV file specified? """ return False if len(arg) < 2 else True def usage(): """ Print a nice usage message """ print "Usage: bin/python " + basename(__file__) + " <filename>.tsv" if __name__ == "__main__": if chkarg(argv): parse(argv[1]) else: usage()<|fim▁end|>
doc.appendChild(root) oldnid = doc.createElement('macrepo:oldNid') oldnid.appendChild(doc.createTextNode(row.get('macrepo:oldNid', ''))) root.appendChild(oldnid)
<|file_name|>api_permissions.py<|end_file_name|><|fim▁begin|>from rest_framework.permissions import BasePermission class HasValidProjectToken(BasePermission): """ Return True if the request has a valid project token. """<|fim▁hole|><|fim▁end|>
def has_permission(self, request, view): return bool(request.auth)
<|file_name|>utils.cpp<|end_file_name|><|fim▁begin|>// Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifdef _WIN32 #include <conio.h> #else #include <termios.h> #endif #include <iostream> #include <cctype> #include "utils.hpp" double rmw_time_to_seconds(const rmw_time_t & time) { double result = static_cast<double>(time.sec); result += 1e-9 * time.nsec; return result; } void print_qos(const rclcpp::QoS & qos) { const auto & rmw_qos = qos.get_rmw_qos_profile(); std::cout << "HISTORY POLICY: "; switch (rmw_qos.history) { case RMW_QOS_POLICY_HISTORY_KEEP_LAST: std::cout << "keep last"; break; case RMW_QOS_POLICY_HISTORY_KEEP_ALL: std::cout << "keep all"; break; default: std::cout << "invalid"; } std::cout << " (depth: " << rmw_qos.depth << ')' << std::endl; std::cout << "RELIABILITY POLICY: "; switch (rmw_qos.reliability) { case RMW_QOS_POLICY_RELIABILITY_RELIABLE: std::cout << "reliable"; break; case RMW_QOS_POLICY_RELIABILITY_BEST_EFFORT: std::cout << "best effort"; break; default: std::cout << "invalid"; } std::cout << std::endl; std::cout << "DURABILITY POLICY: "; switch (rmw_qos.durability) { case RMW_QOS_POLICY_DURABILITY_TRANSIENT_LOCAL: std::cout << "transient local"; break; case RMW_QOS_POLICY_DURABILITY_VOLATILE: std::cout << "volatile"; break; default: std::cout << "invalid"; } std::cout << std::endl; std::cout << "DEADLINE: " << rmw_time_to_seconds(rmw_qos.deadline) << std::endl; 
std::cout << "LIFESPAN: " << rmw_time_to_seconds(rmw_qos.lifespan) << std::endl; std::cout << "LIVELINESS POLICY: "; switch (rmw_qos.liveliness) { case RMW_QOS_POLICY_LIVELINESS_AUTOMATIC: std::cout << "automatic"; break; case RMW_QOS_POLICY_LIVELINESS_MANUAL_BY_TOPIC: std::cout << "manual by topic"; break; default: std::cout << "invalid"; } std::cout << " (lease duration: " << rmw_time_to_seconds(rmw_qos.liveliness_lease_duration) << ')' << std::endl; } bool CommandGetter::is_active() const { return run_.load(std::memory_order_relaxed); } void CommandGetter::start() { thread_ = std::thread(std::ref(*this)); run_.store(true, std::memory_order_relaxed); } void CommandGetter::stop() { run_.store(false, std::memory_order_relaxed); thread_.join(); } void CommandGetter::operator()() const { while (run_.load(std::memory_order_relaxed)) { char cmd = getch(); handle_cmd(cmd); } } char CommandGetter::getch() const { #ifdef _WIN32 char ch = _getch(); #else<|fim▁hole|> tcgetattr(0, &old_termios); /* grab old terminal i/o settings */ termios new_termios = old_termios; /* make new settings same as old settings */ new_termios.c_lflag &= ~ICANON; /* disable buffered i/o */ new_termios.c_lflag &= ~ECHO; /* set no echo mode */ tcsetattr(0, TCSANOW, &new_termios); /* use these new terminal i/o settings now */ char ch = getchar(); tcsetattr(0, TCSANOW, &old_termios); /* restore old terminal i/o settings */ #endif return ch; }<|fim▁end|>
termios old_termios;
<|file_name|>TrackingLiteVoAssembler.java<|end_file_name|><|fim▁begin|>//############################################################################# //# # //# Copyright (C) <2014> <IMS MAXIMS> # //# # //# This program is free software: you can redistribute it and/or modify # //# it under the terms of the GNU Affero General Public License as # //# published by the Free Software Foundation, either version 3 of the # //# License, or (at your option) any later version. # //# # //# This program is distributed in the hope that it will be useful, # //# but WITHOUT ANY WARRANTY; without even the implied warranty of # //# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # //# GNU Affero General Public License for more details. # //# # //# You should have received a copy of the GNU Affero General Public License # //# along with this program. If not, see <http://www.gnu.org/licenses/>. # //# # //############################################################################# //#EOH /* * This code was generated * Copyright (C) 1995-2004 IMS MAXIMS plc. All rights reserved. 
* IMS Development Environment (version 1.80 build 5007.25751) * WARNING: DO NOT MODIFY the content of this file * Generated on 16/04/2014, 12:31 * */ package ims.emergency.vo.domain; import ims.vo.domain.DomainObjectMap; import java.util.HashMap; import org.hibernate.proxy.HibernateProxy; /** * @author Cornel Ventuneac */ public class TrackingLiteVoAssembler { /** * Copy one ValueObject to another * @param valueObjectDest to be updated * @param valueObjectSrc to copy values from */ public static ims.emergency.vo.TrackingLiteVo copy(ims.emergency.vo.TrackingLiteVo valueObjectDest, ims.emergency.vo.TrackingLiteVo valueObjectSrc) { if (null == valueObjectSrc) { <|fim▁hole|> // CurrentArea valueObjectDest.setCurrentArea(valueObjectSrc.getCurrentArea()); // isPrimaryCare valueObjectDest.setIsPrimaryCare(valueObjectSrc.getIsPrimaryCare()); // isDischarged valueObjectDest.setIsDischarged(valueObjectSrc.getIsDischarged()); // LastMovementDateTime valueObjectDest.setLastMovementDateTime(valueObjectSrc.getLastMovementDateTime()); return valueObjectDest; } /** * Create the ValueObject collection to hold the set of DomainObjects. * This is a convenience method only. * It is intended to be used when one called to an Assembler is made. * If more than one call to an Assembler is made then #createTrackingLiteVoCollectionFromTracking(DomainObjectMap, Set) should be used. * @param domainObjectSet - Set of ims.emergency.domain.objects.Tracking objects. */ public static ims.emergency.vo.TrackingLiteVoCollection createTrackingLiteVoCollectionFromTracking(java.util.Set domainObjectSet) { return createTrackingLiteVoCollectionFromTracking(new DomainObjectMap(), domainObjectSet); } /** * Create the ValueObject collection to hold the set of DomainObjects. * @param map - maps DomainObjects to created ValueObjects * @param domainObjectSet - Set of ims.emergency.domain.objects.Tracking objects. 
*/ public static ims.emergency.vo.TrackingLiteVoCollection createTrackingLiteVoCollectionFromTracking(DomainObjectMap map, java.util.Set domainObjectSet) { ims.emergency.vo.TrackingLiteVoCollection voList = new ims.emergency.vo.TrackingLiteVoCollection(); if ( null == domainObjectSet ) { return voList; } int rieCount=0; int activeCount=0; java.util.Iterator iterator = domainObjectSet.iterator(); while( iterator.hasNext() ) { ims.emergency.domain.objects.Tracking domainObject = (ims.emergency.domain.objects.Tracking) iterator.next(); ims.emergency.vo.TrackingLiteVo vo = create(map, domainObject); if (vo != null) voList.add(vo); if (domainObject != null) { if (domainObject.getIsRIE() != null && domainObject.getIsRIE().booleanValue() == true) rieCount++; else activeCount++; } } voList.setRieCount(rieCount); voList.setActiveCount(activeCount); return voList; } /** * Create the ValueObject collection to hold the list of DomainObjects. * @param domainObjectList - List of ims.emergency.domain.objects.Tracking objects. */ public static ims.emergency.vo.TrackingLiteVoCollection createTrackingLiteVoCollectionFromTracking(java.util.List domainObjectList) { return createTrackingLiteVoCollectionFromTracking(new DomainObjectMap(), domainObjectList); } /** * Create the ValueObject collection to hold the list of DomainObjects. * @param map - maps DomainObjects to created ValueObjects * @param domainObjectList - List of ims.emergency.domain.objects.Tracking objects. 
*/ public static ims.emergency.vo.TrackingLiteVoCollection createTrackingLiteVoCollectionFromTracking(DomainObjectMap map, java.util.List domainObjectList) { ims.emergency.vo.TrackingLiteVoCollection voList = new ims.emergency.vo.TrackingLiteVoCollection(); if ( null == domainObjectList ) { return voList; } int rieCount=0; int activeCount=0; for (int i = 0; i < domainObjectList.size(); i++) { ims.emergency.domain.objects.Tracking domainObject = (ims.emergency.domain.objects.Tracking) domainObjectList.get(i); ims.emergency.vo.TrackingLiteVo vo = create(map, domainObject); if (vo != null) voList.add(vo); if (domainObject != null) { if (domainObject.getIsRIE() != null && domainObject.getIsRIE().booleanValue() == true) rieCount++; else activeCount++; } } voList.setRieCount(rieCount); voList.setActiveCount(activeCount); return voList; } /** * Create the ims.emergency.domain.objects.Tracking set from the value object collection. * @param domainFactory - used to create existing (persistent) domain objects. * @param voCollection - the collection of value objects */ public static java.util.Set extractTrackingSet(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.TrackingLiteVoCollection voCollection) { return extractTrackingSet(domainFactory, voCollection, null, new HashMap()); } public static java.util.Set extractTrackingSet(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.TrackingLiteVoCollection voCollection, java.util.Set domainObjectSet, HashMap domMap) { int size = (null == voCollection) ? 0 : voCollection.size(); if (domainObjectSet == null) { domainObjectSet = new java.util.HashSet(); } java.util.Set newSet = new java.util.HashSet(); for(int i=0; i<size; i++) { ims.emergency.vo.TrackingLiteVo vo = voCollection.get(i); ims.emergency.domain.objects.Tracking domainObject = TrackingLiteVoAssembler.extractTracking(domainFactory, vo, domMap); //TODO: This can only occur in the situation of a stale object exception. 
For now leave it to the Interceptor to handle it. if (domainObject == null) { continue; } //Trying to avoid the hibernate collection being marked as dirty via its public interface methods. (like add) if (!domainObjectSet.contains(domainObject)) domainObjectSet.add(domainObject); newSet.add(domainObject); } java.util.Set removedSet = new java.util.HashSet(); java.util.Iterator iter = domainObjectSet.iterator(); //Find out which objects need to be removed while (iter.hasNext()) { ims.domain.DomainObject o = (ims.domain.DomainObject)iter.next(); if ((o == null || o.getIsRIE() == null || !o.getIsRIE().booleanValue()) && !newSet.contains(o)) { removedSet.add(o); } } iter = removedSet.iterator(); //Remove the unwanted objects while (iter.hasNext()) { domainObjectSet.remove(iter.next()); } return domainObjectSet; } /** * Create the ims.emergency.domain.objects.Tracking list from the value object collection. * @param domainFactory - used to create existing (persistent) domain objects. * @param voCollection - the collection of value objects */ public static java.util.List extractTrackingList(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.TrackingLiteVoCollection voCollection) { return extractTrackingList(domainFactory, voCollection, null, new HashMap()); } public static java.util.List extractTrackingList(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.TrackingLiteVoCollection voCollection, java.util.List domainObjectList, HashMap domMap) { int size = (null == voCollection) ? 0 : voCollection.size(); if (domainObjectList == null) { domainObjectList = new java.util.ArrayList(); } for(int i=0; i<size; i++) { ims.emergency.vo.TrackingLiteVo vo = voCollection.get(i); ims.emergency.domain.objects.Tracking domainObject = TrackingLiteVoAssembler.extractTracking(domainFactory, vo, domMap); //TODO: This can only occur in the situation of a stale object exception. For now leave it to the Interceptor to handle it. 
if (domainObject == null) { continue; } int domIdx = domainObjectList.indexOf(domainObject); if (domIdx == -1) { domainObjectList.add(i, domainObject); } else if (i != domIdx && i < domainObjectList.size()) { Object tmp = domainObjectList.get(i); domainObjectList.set(i, domainObjectList.get(domIdx)); domainObjectList.set(domIdx, tmp); } } //Remove all ones in domList where index > voCollection.size() as these should //now represent the ones removed from the VO collection. No longer referenced. int i1=domainObjectList.size(); while (i1 > size) { domainObjectList.remove(i1-1); i1=domainObjectList.size(); } return domainObjectList; } /** * Create the ValueObject from the ims.emergency.domain.objects.Tracking object. * @param domainObject ims.emergency.domain.objects.Tracking */ public static ims.emergency.vo.TrackingLiteVo create(ims.emergency.domain.objects.Tracking domainObject) { if (null == domainObject) { return null; } DomainObjectMap map = new DomainObjectMap(); return create(map, domainObject); } /** * Create the ValueObject from the ims.emergency.domain.objects.Tracking object. * @param map DomainObjectMap of DomainObjects to already created ValueObjects. * @param domainObject */ public static ims.emergency.vo.TrackingLiteVo create(DomainObjectMap map, ims.emergency.domain.objects.Tracking domainObject) { if (null == domainObject) { return null; } // check if the domainObject already has a valueObject created for it ims.emergency.vo.TrackingLiteVo valueObject = (ims.emergency.vo.TrackingLiteVo) map.getValueObject(domainObject, ims.emergency.vo.TrackingLiteVo.class); if ( null == valueObject ) { valueObject = new ims.emergency.vo.TrackingLiteVo(domainObject.getId(), domainObject.getVersion()); map.addValueObject(domainObject, valueObject); valueObject = insert(map, valueObject, domainObject); } return valueObject; } /** * Update the ValueObject with the Domain Object. 
* @param valueObject to be updated * @param domainObject ims.emergency.domain.objects.Tracking */ public static ims.emergency.vo.TrackingLiteVo insert(ims.emergency.vo.TrackingLiteVo valueObject, ims.emergency.domain.objects.Tracking domainObject) { if (null == domainObject) { return valueObject; } DomainObjectMap map = new DomainObjectMap(); return insert(map, valueObject, domainObject); } /** * Update the ValueObject with the Domain Object. * @param map DomainObjectMap of DomainObjects to already created ValueObjects. * @param valueObject to be updated * @param domainObject ims.emergency.domain.objects.Tracking */ public static ims.emergency.vo.TrackingLiteVo insert(DomainObjectMap map, ims.emergency.vo.TrackingLiteVo valueObject, ims.emergency.domain.objects.Tracking domainObject) { if (null == domainObject) { return valueObject; } if (null == map) { map = new DomainObjectMap(); } valueObject.setID_Tracking(domainObject.getId()); valueObject.setIsRIE(domainObject.getIsRIE()); // If this is a recordedInError record, and the domainObject // value isIncludeRecord has not been set, then we return null and // not the value object if (valueObject.getIsRIE() != null && valueObject.getIsRIE().booleanValue() == true && !domainObject.isIncludeRecord()) return null; // If this is not a recordedInError record, and the domainObject // value isIncludeRecord has been set, then we return null and // not the value object if ((valueObject.getIsRIE() == null || valueObject.getIsRIE().booleanValue() == false) && domainObject.isIncludeRecord()) return null; // CurrentArea if (domainObject.getCurrentArea() != null) { if(domainObject.getCurrentArea() instanceof HibernateProxy) // If the proxy is set, there is no need to lazy load, the proxy knows the id already. 
{ HibernateProxy p = (HibernateProxy) domainObject.getCurrentArea(); int id = Integer.parseInt(p.getHibernateLazyInitializer().getIdentifier().toString()); valueObject.setCurrentArea(new ims.emergency.configuration.vo.TrackingAreaRefVo(id, -1)); } else { valueObject.setCurrentArea(new ims.emergency.configuration.vo.TrackingAreaRefVo(domainObject.getCurrentArea().getId(), domainObject.getCurrentArea().getVersion())); } } // isPrimaryCare valueObject.setIsPrimaryCare( domainObject.isIsPrimaryCare() ); // isDischarged valueObject.setIsDischarged( domainObject.isIsDischarged() ); // LastMovementDateTime java.util.Date LastMovementDateTime = domainObject.getLastMovementDateTime(); if ( null != LastMovementDateTime ) { valueObject.setLastMovementDateTime(new ims.framework.utils.DateTime(LastMovementDateTime) ); } return valueObject; } /** * Create the domain object from the value object. * @param domainFactory - used to create existing (persistent) domain objects. * @param valueObject - extract the domain object fields from this. 
*/ public static ims.emergency.domain.objects.Tracking extractTracking(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.TrackingLiteVo valueObject) { return extractTracking(domainFactory, valueObject, new HashMap()); } public static ims.emergency.domain.objects.Tracking extractTracking(ims.domain.ILightweightDomainFactory domainFactory, ims.emergency.vo.TrackingLiteVo valueObject, HashMap domMap) { if (null == valueObject) { return null; } Integer id = valueObject.getID_Tracking(); ims.emergency.domain.objects.Tracking domainObject = null; if ( null == id) { if (domMap.get(valueObject) != null) { return (ims.emergency.domain.objects.Tracking)domMap.get(valueObject); } // ims.emergency.vo.TrackingLiteVo ID_Tracking field is unknown domainObject = new ims.emergency.domain.objects.Tracking(); domMap.put(valueObject, domainObject); } else { String key = (valueObject.getClass().getName() + "__" + valueObject.getID_Tracking()); if (domMap.get(key) != null) { return (ims.emergency.domain.objects.Tracking)domMap.get(key); } domainObject = (ims.emergency.domain.objects.Tracking) domainFactory.getDomainObject(ims.emergency.domain.objects.Tracking.class, id ); //TODO: Not sure how this should be handled. Effectively it must be a staleobject exception, but maybe should be handled as that further up. 
if (domainObject == null) return null; domMap.put(key, domainObject); } domainObject.setVersion(valueObject.getVersion_Tracking()); ims.emergency.configuration.domain.objects.TrackingArea value1 = null; if ( null != valueObject.getCurrentArea() ) { if (valueObject.getCurrentArea().getBoId() == null) { if (domMap.get(valueObject.getCurrentArea()) != null) { value1 = (ims.emergency.configuration.domain.objects.TrackingArea)domMap.get(valueObject.getCurrentArea()); } } else if (valueObject.getBoVersion() == -1) // RefVo was not modified since obtained from the Assembler, no need to update the BO field { value1 = domainObject.getCurrentArea(); } else { value1 = (ims.emergency.configuration.domain.objects.TrackingArea)domainFactory.getDomainObject(ims.emergency.configuration.domain.objects.TrackingArea.class, valueObject.getCurrentArea().getBoId()); } } domainObject.setCurrentArea(value1); domainObject.setIsPrimaryCare(valueObject.getIsPrimaryCare()); domainObject.setIsDischarged(valueObject.getIsDischarged()); ims.framework.utils.DateTime dateTime4 = valueObject.getLastMovementDateTime(); java.util.Date value4 = null; if ( dateTime4 != null ) { value4 = dateTime4.getJavaDate(); } domainObject.setLastMovementDateTime(value4); return domainObject; } }<|fim▁end|>
return valueObjectSrc; } valueObjectDest.setID_Tracking(valueObjectSrc.getID_Tracking()); valueObjectDest.setIsRIE(valueObjectSrc.getIsRIE());
<|file_name|>user.ts<|end_file_name|><|fim▁begin|>export class User { constructor( public login: string,<|fim▁hole|>}<|fim▁end|>
public avatar: string ) {}
<|file_name|>DslField.java<|end_file_name|><|fim▁begin|>/* * Copyright 2017 Courtanet * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.doov.core.dsl; import io.doov.core.FieldId; import io.doov.core.dsl.impl.DefaultCondition; import io.doov.core.dsl.lang.Readable; /** * Interface for all field types. * * Generic type parameter {@link T} defines the type of the field. */<|fim▁hole|> FieldId id(); /** * Returns a new default condition that will use this as a field. * * @return the default condition */ DefaultCondition<T> getDefaultCondition(); }<|fim▁end|>
public interface DslField<T> extends Readable {
<|file_name|>search.py<|end_file_name|><|fim▁begin|>""" /** * Ossec Framework * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * @category Ossec * @package Ossec * @version $Id: Histogram.php,v 1.3 2008/03/03 15:12:18 dcid Exp $ * @author Chris Abernethy * @copyright Copyright (c) 2007-2008, Daniel B. Cid <[email protected]>, All rights reserved. * @license http://www.gnu.org/licenses/gpl-3.0.txt GNU Public License */ """ ############################################################## # Copyright C) 2015 Masashi Okumura All rights reseerved. 
############################################################## import os,sys import re from flask import Flask, session, request, redirect, render_template, url_for from flask import jsonify, make_response from datetime import * import time import uuid import hashlib import ossec_conf import os_lib_handle import os_lib_agent import os_lib_alerts #import os_lib_syscheck from ossec_categories import global_categories from ossec_formats import log_categories from .view import View class Search(View): def __init__(self, request): super().__init__() self.request = request self.html = "" self.contents= "" self.is_post = False if request.method == 'POST': self.is_post = True self._make_contents() self._make_html() def _make_contents(self): # Starting handle ossec_handle = os_lib_handle.os_handle_start(ossec_conf.ossec_dir) # Iniitializing some variables u_final_time = int(time.time()) #u_final_time = int(time.mktime(datetime.now().timetuple())) u_init_time = int(u_final_time - ossec_conf.ossec_search_time) # 14400 = 3600 * 4 u_level = ossec_conf.ossec_search_level # 7 u_pattern = "" u_rule = "" u_srcip = "" u_user = "" u_location = "" # masao added the folloings : USER_final = 0 USER_init = 0 USER_level = "" USER_pattern = None LOCATION_pattern = None USER_group = None USER_log = None USER_rule = None USER_srcip = None USER_user = None USER_page = int(1) USER_searchid = 0 USER_monitoring = 0 used_stored = 0 buffer = "" # Getting search id if self.is_post and ('searchid' in self.request.form): str_searchid = self.request.form.get('searchid') if re.search("[a-z0-9]+", str_searchid): USER_searchid = str_searchid # It might be hex. dont use int(). is_rt_monitoring = False # TODO : real time monitoring t.b. implemented. 
rt_sk = "" sv_sk = 'checked="checked"' if self.is_post and ('monitoring' in self.request.form): str_monitoring = self.request.form.get('monitoring') if int(str_monitoring) == 1: is_rt_monitoring = True rt_sk = 'checked="checked"' sv_sk = ""; # Cleaning up time USER_final = u_final_time USER_init = u_init_time USER_monitoring = 1 # Cleaning up fields # $_POST['search'] = "Search"; # unset($_POST['initdate']);<|fim▁hole|> # Deleting search if USER_searchid != 0: os_lib_alerts.os_cleanstored(USER_searchid) # Refreshing every 90 seconds by default */ m_ossec_refresh_time = ossec_conf.ossec_refresh_time * 1000; buffer += """\ <script language="javascript"> setTimeout("document.dosearch.submit()", %d); </script>\n""" % m_ossec_refresh_time # Reading user input -- being very careful parsing it # Initial Date datepattern = "^([0-9]{4})-([0-9]{2})-([0-9]{2}) ([0-9]{2}):([0-9]{2})$"; if is_rt_monitoring: pass elif self.is_post and ('initdate' in self.request.form): str_initdate = self.request.form.get('initdate') mobj = re.search(datepattern, str_initdate) if mobj: year = int(mobj.group(1)) month = int(mobj.group(2)) day = int(mobj.group(3)) hour = int(mobj.group(4)) minute = int(mobj.group(5)) USER_init = int(time.mktime((year, month, day, hour, minute, 0, 0, 0, -1))) u_init_time = USER_init # to check : # print(datetime.fromtimestamp(u_init_time)) # Final Date if is_rt_monitoring: pass elif self.is_post and ('finaldate' in self.request.form): str_finaldate = self.request.form.get('finaldate') mobj = re.search(datepattern, str_finaldate) if mobj: year = int(mobj.group(1)) month = int(mobj.group(2)) day = int(mobj.group(3)) hour = int(mobj.group(4)) minute = int(mobj.group(5)) USER_final = int(time.mktime((year, month, day, hour, minute, 0, 0, 0, -1))) u_final_time = USER_final # Level if self.is_post and ('level' in self.request.form): str_level = self.request.form.get('level') if str_level and str_level.isdigit() and (int(str_level) > 0) and (int(str_level) < 16): 
USER_level = str_level u_level = str_level # Page if self.is_post and ('page' in self.request.form): str_page = self.request.form.get('page') if str_page and str_page.isdigit() and (int(str_page) > 0) and (int(str_page) <= 999): USER_page = str_page # Pattern strpattern = "^[0-9a-zA-Z. _|^!\-()?]{1,128}$" intpattern = "^[0-9]{1,8}$" if self.is_post and ('strpattern' in self.request.form): str_strpattern = self.request.form.get('strpattern') if re.search(strpattern, str_strpattern): USER_pattern = str_strpattern u_pattern = USER_pattern # Getting location if self.is_post and ('locationpattern' in self.request.form): lcpattern = "^[0-9a-zA-Z. _|^!>\/\\-]{1,156}$" str_locationpattern = self.request.form.get('locationpattern') if re.search(lcpattern, str_locationpattern): LOCATION_pattern = str_locationpattern u_location = LOCATION_pattern # Group pattern if self.is_post and ('grouppattern' in self.request.form): str_grouppattern = self.request.form.get('grouppattern') if str_grouppattern == "ALL": USER_group = None elif re.search(strpattern, str_grouppattern): USER_group = str_grouppattern pass # Log pattern if self.is_post and ('logpattern' in self.request.form): str_logpattern = self.request.form.get('logpattern') if str_logpattern == "ALL": USER_log = None elif re.search(strpattern, str_logpattern): USER_log = str_logpattern # Rule pattern if self.is_post and ('rulepattern' in self.request.form): str_rulepattern = self.request.form.get('rulepattern') if re.search(strpattern, str_rulepattern): USER_rule = str_rulepattern u_rule = USER_rule # Src ip pattern if self.is_post and ('srcippattern' in self.request.form): str_srcippattern = self.request.form.get('srcippattern') if re.search(strpattern, str_srcippattern): USER_srcip = str_srcippattern u_srcip = USER_srcip # User pattern if self.is_post and ('userpattern' in self.request.form): str_userpattern = self.request.form.get('userpattern') if re.search(strpattern, str_userpattern): USER_user = str_userpattern u_user 
= USER_user # Maximum number of alerts if self.is_post and ('max_alerts_per_page' in self.request.form): str_max_alerts_per_page = self.request.form.get('max_alerts_per_page') if re.search(intpattern, str_max_alerts_per_page): int_max_alerts_per_page = int (str_max_alerts_per_page) if (int_max_alerts_per_page > 200) and (int_max_alerts_per_page < 10000): ossec_conf.ossec_max_alerts_per_page = int_max_alerts_per_page # Getting search id -- should be enough to avoid duplicates if is_rt_monitoring: # 'get('search') is "Search" m = hashlib.md5() m.update(str(uuid.uuid4()).encode('UTF-8')) USER_searchid = m.hexdigest() USER_page = 1 elif self.is_post and ('search' in self.request.form): str_search = self.request.form.get('search') # ImmutableMultiDict([('initdate', '2015-07-21 15:00'), ('level', '3'), ('search', 'Search'), ('monitoring', '0'), ('finaldate', '2015-07-21 19:00'), ('searchid', '0')]) if str_search == "Search": # Creating new search id # (in php) $USER_searchid = md5(uniqid(rand(), true)); m = hashlib.md5() m.update(str(uuid.uuid4()).encode('UTF-8')) USER_searchid = m.hexdigest() USER_page = 1 elif str_search == "<< First": USER_page = 1 elif str_search == "< Prev": if int(USER_page) > 1: UESR_page = int(USER_page) - 1 elif str_search == "Next >": USER_page = int(USER_page) + 1 elif str_search == "Last >>": USER_page = 999 elif str_search == "": pass else: buffer += "<b class='red'>Invalid search. 
</b><br />\n" self.contents = buffer return # Printing current date buffer += """<div class="smaller2">%s<br/>""" % datetime.now().strftime("%m/%d/%Y %H:%M:%S") if USER_monitoring == 1: buffer += """ -- Refreshing every %s secs</div><br />""" % ossec_conf.ossec_refresh_time else: buffer += "</div><br/>" # Getting all agents agent_list = os_lib_agent.os_getagents(ossec_handle) buffer += "<h2>Alert search options:</h2>\n" ################# ### Search forms ### ################# buffer += """\ <form name="dosearch" method="post" action="/search"> <table><tr valign="top"> <td><input type="radio" name="monitoring" value="0" checked="checked"/></td> <td>From: &nbsp;<input type="text" name="initdate" id="i_date_a" size="17" value="%s" maxlength="16" class="formText" /> <img src="static/img/calendar.gif" id="i_trigger" title="Date selector" alt="Date selector" class="formText" /></td> <td>&nbsp;&nbsp;&nbsp;To: &nbsp;<input type="text" name="finaldate" id="f_date_a" size="17" value="%s" maxlength="16" class="formText" /> <img src="static/img/calendar.gif" id="f_trigger" title="Date selector" alt="Date selector" class="formText" /></td> </tr> """ % ( datetime.fromtimestamp(u_init_time).strftime("%Y-%m-%d %H:%M"), datetime.fromtimestamp(u_final_time).strftime("%Y-%m-%d %H:%M") ) buffer += """<tr><td><input type="radio" name="monitoring" value="1" %s/></td> <td>Real time monitoring</td></tr> </table> <br /> <table> """ % rt_sk # Minimum Level buffer += """<tr><td>Minimum level:</td><td><select name="level" class="formText">""" if int(u_level) == 1: buffer += ' <option value="1" selected="selected">All</option>' else: buffer += ' <option value="1">All</option>' for l_counter in range(15, 1, -1): if l_counter == int(u_level): buffer += ' <option value="%s" selected="selected">%s</option>' % (l_counter, l_counter) else: buffer += ' <option value="%s">%s</option>' % (l_counter, l_counter) buffer += "</select>" # Category buffer += """</td><td> Category: </td><td><select 
name="grouppattern" class="formText">""" buffer += '<option value="ALL" class="bluez">All categories</option>' for _cat_name, _cat in global_categories.items(): for cat_name, cat_val in _cat.items(): sl = "" if USER_group == cat_val: sl = ' selected="selected"' if cat_name.find("(all)") != -1: buffer += """<option class="bluez" %s value="%s">%s</option>""" % (sl, cat_val, cat_name) else: buffer += """<option value="%s" %s> &nbsp; %s</option>""" % (cat_val, sl, cat_name) buffer += '</select>' # Str pattern buffer += """</td></tr><tr><td> Pattern: </td><td><input type="text" name="strpattern" size="16" value="%s" class="formText" /></td>""" % u_pattern # Log formats buffer += '<td>Log formats: </td><td><select name="logpattern" class="formText">' buffer += '<option value="ALL" class="bluez">All log formats</option>' for _cat_name, _cat in log_categories.items(): for cat_name, cat_val in _cat.items(): sl = "" if USER_log == cat_val: sl = ' selected="selected"' if cat_name.find("(all)") != -1: buffer += """<option class="bluez" %s value="%s">%s</option>"""% (sl, cat_val, cat_name) else: buffer += """<option value="%s" %s> &nbsp; %s</option>""" % (cat_val, sl, cat_name) buffer += '</select>' # Srcip pattern buffer += """</td></tr><tr><td> Srcip: </td><td> <input type="text" name="srcippattern" size="16" class="formText" value="%s"/>&nbsp;&nbsp;""" % u_srcip # Rule pattern buffer += """</td><td> User: </td><td><input type="text" name="userpattern" size="8" value="%s" class="formText" /></td></tr>""" % u_user # Location buffer += """<tr><td> Location:</td><td> <input type="text" name="locationpattern" size="16" class="formText" value="%s"/>&nbsp;&nbsp;""" % u_location # Rule pattern buffer += """</td><td> Rule id: </td><td><input type="text" name="rulepattern" size="8" value="%s" class="formText"/>""" % u_rule # Max alerts buffer += """'</td></tr><tr><td> Max Alerts:</td> <td><input type="text" name="max_alerts_per_page" size="8" value="%s" class="formText" /></td></tr> 
""" % ossec_conf.ossec_max_alerts_per_page # Agent # seems not implemented # Final form buffer += """\ <tr><td> <input type="submit" name="search" value="Search" class="button" /> """ buffer += """</td></tr></table> <input type="hidden" name="searchid" value="%s" /> </form><br /> <br />""" % USER_searchid # Java script for date buffer += """\ <script type="text/javascript"> Calendar.setup({ button : "i_trigger", inputField : "i_date_a", ifFormat : "%Y-%m-%d %H:%M", showsTime : true, timeFormat : "24" }); Calendar.setup({ button : "f_trigger", inputField : "f_date_a", ifFormat : "%Y-%m-%d %H:%M", showsTime : true, timeFormat : "24" }); </script> """ buffer += "<h2>Results:</h2>\n" if (not USER_init) or (not USER_final) or (not USER_level): buffer += "<b>No search performed.</b><br/>\n" self.contents = buffer return output_list = None # Getting stored alerts if is_rt_monitoring: # Getting alerts output_list = os_lib_alerts.os_searchalerts(ossec_handle, USER_searchid, USER_init, USER_final, ossec_conf.ossec_max_alerts_per_page, USER_level, USER_rule, LOCATION_pattern, USER_pattern, USER_group, USER_srcip, USER_user, USER_log) elif self.is_post and ('search' in request.form): str_search = self.request.form.get("search") if str_search != "Search": output_list = os_lib_alerts.os_getstoredalerts(ossec_handle, USER_searchid) used_stored = 1 else: # Searchiing for new ones # Getting alerts output_list = os_lib_alerts.os_searchalerts(ossec_handle, USER_searchid, USER_init, USER_final, ossec_conf.ossec_max_alerts_per_page, USER_level, USER_rule, LOCATION_pattern, USER_pattern, USER_group, USER_srcip, USER_user, USER_log) if (output_list is None) or (output_list[1] is None): if used_stored == 1: buffer += "<b class='red'>Nothing returned (search expired). </b><br />\n" else: buffer += "<b class='red'>Nothing returned. </b><br />\n" self.contents = buffer return # Checking for no return if not 'count' in output_list[0]: buffer += "<b class='red'>Nothing returned. 
</b><br />\n" self.contents = buffer return # Checking maximum page size if int(USER_page) >= int(output_list[0]['pg']): USER_page = output_list[0]['pg'] # Page 1 will become the latest and the latest, page 1 real_page = (output_list[0]['pg'] + 1) - USER_page buffer += "<b>Total alerts found: </b>%s<br />" % output_list[0]['count'] if output_list[0]['pg'] > 1: buffer += "<b>Output divided in </b>%s pages.<br/>" % output_list[0]['pg'] buffer += '<br /><form name="dopage" method="post" action="/search">' buffer += """\ <input type="submit" name="search" value="<< First" class="button" class="formText" /> <input type="submit" name="search" value="< Prev" class="button" class="formText" /> Page <b>%s</b> (%s alerts)""" % (USER_page, output_list[0][real_page]) # Currently page buffer += """\ <input type="hidden" name="initdate" value="%s" /> <input type="hidden" name="finaldate" value="%s" /> <input type="hidden" name="rulepattern" value="%s" /> <input type="hidden" name="srcippattern" value="%s" /> <input type="hidden" name="userpattern" value="'%s" /> <input type="hidden" name="locationpattern" value="%s" /> <input type="hidden" name="level" value="%s" /> <input type="hidden" name="page" value="%s" /> <input type="hidden" name="searchid" value="%s" /> <input type="hidden" name="monitoring" value="%s" /> <input type="hidden" name="max_alerts_per_page" value="%s" /> """ % ( datetime.fromtimestamp(u_init_time).strftime("%Y-%m-%d %H:%M"), datetime.fromtimestamp(u_final_time).strftime("%Y-%m-%d %H:%M"), u_rule, u_srcip, u_user, u_location, u_level, USER_page, USER_searchid, USER_monitoring, ossec_conf.ossec_max_alerts_per_page ) if output_list[0]['pg'] > 1: buffer += """\ &nbsp;&nbsp; <input type="submit" name="search" value="Next >" class="button" class="formText" /> <input type="submit" name="search" value="Last >>" class="button" class="formText" /> </form> """ # Checking if page exists target = output_list[real_page] target_file = os.environ['CCPRISM_HOME'] + target 
print("real_page is %s" %real_page) print("target_file is " + target_file) print(output_list[0].keys()) if 'count' in output_list[0].keys(): print("count key exists.") if 'pg' in output_list[0].keys(): print ('pg key exists') if real_page in output_list[0].keys(): print("real_page key exists.") if (not real_page in output_list[0].keys()) or (len(target) < 5) or (not os.path.exists(target_file)): #if (not output_list[0][real_page]) or (len(target) < 5) or (not os.path.exists(target_file)): print("heyheyhey") buffer += "<b class='red'>Nothing returned (or search expired). (* 1)</b><br />\n" self.contents = buffer return buffer += "<br/><br/>" # Printing page # TODO: There are functions for slurping file contents. fobj = open(target_file, 'r') target_buffer = fobj.read() fobj.close() buffer += target_buffer self.contents = buffer def _make_html(self): self.html = """\ <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> %s </head> <body> <br/> %s <div id="container"> <div id="content_box"> <div id="content" class="pages"> <a name="top"></a> <!-- BEGIN: content --> %s <!-- END: content --> <br /><br /> <br /><br /> </div> </div> %s </div> </body> </html> """ % (View.HEAD, View.HEADER, self.contents, View.FOOTER) pass def getHtml(self): return self.html<|fim▁end|>
# unset($_POST['finaldate']);
<|file_name|>to_TO_test.go<|end_file_name|><|fim▁begin|>package to_TO import ( "testing" "time" "github.com/go-playground/locales" "github.com/go-playground/locales/currency" ) func TestLocale(t *testing.T) { trans := New() expected := "to_TO" if trans.Locale() != expected { t.Errorf("Expected '%s' Got '%s'", expected, trans.Locale()) } } func TestPluralsRange(t *testing.T) { trans := New() tests := []struct { expected locales.PluralRule }{ // { // expected: locales.PluralRuleOther, // }, } rules := trans.PluralsRange() // expected := 1 // if len(rules) != expected { // t.Errorf("Expected '%d' Got '%d'", expected, len(rules)) // } for _, tt := range tests { r := locales.PluralRuleUnknown for i := 0; i < len(rules); i++ { if rules[i] == tt.expected { r = rules[i] break } } if r == locales.PluralRuleUnknown { t.Errorf("Expected '%s' Got '%s'", tt.expected, r) } } } func TestPluralsOrdinal(t *testing.T) { trans := New() tests := []struct { expected locales.PluralRule }{ // { // expected: locales.PluralRuleOne, // }, // { // expected: locales.PluralRuleTwo, // }, // { // expected: locales.PluralRuleFew, // }, // { // expected: locales.PluralRuleOther, // }, } rules := trans.PluralsOrdinal() // expected := 4 // if len(rules) != expected { // t.Errorf("Expected '%d' Got '%d'", expected, len(rules)) // } for _, tt := range tests { r := locales.PluralRuleUnknown for i := 0; i < len(rules); i++ { if rules[i] == tt.expected { r = rules[i] break } } if r == locales.PluralRuleUnknown { t.Errorf("Expected '%s' Got '%s'", tt.expected, r) } } } func TestPluralsCardinal(t *testing.T) { trans := New() tests := []struct { expected locales.PluralRule }{ // { // expected: locales.PluralRuleOne, // }, // { // expected: locales.PluralRuleOther, // }, } rules := trans.PluralsCardinal() // expected := 2 // if len(rules) != expected { // t.Errorf("Expected '%d' Got '%d'", expected, len(rules)) // } for _, tt := range tests { r := locales.PluralRuleUnknown for i := 0; i < len(rules); i++ { 
if rules[i] == tt.expected { r = rules[i] break } } if r == locales.PluralRuleUnknown { t.Errorf("Expected '%s' Got '%s'", tt.expected, r) } } } func TestRangePlurals(t *testing.T) { trans := New() tests := []struct { num1 float64 v1 uint64 num2 float64 v2 uint64 expected locales.PluralRule }{ // { // num1: 1, // v1: 1, // num2: 2, // v2: 2, // expected: locales.PluralRuleOther, // }, } for _, tt := range tests { rule := trans.RangePluralRule(tt.num1, tt.v1, tt.num2, tt.v2) if rule != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, rule) } } } func TestOrdinalPlurals(t *testing.T) { trans := New() tests := []struct { num float64 v uint64 expected locales.PluralRule }{ // { // num: 1, // v: 0, // expected: locales.PluralRuleOne, // }, // { // num: 2, // v: 0, // expected: locales.PluralRuleTwo, // }, // { // num: 3, // v: 0, // expected: locales.PluralRuleFew, // }, // { // num: 4, // v: 0, // expected: locales.PluralRuleOther, // }, } for _, tt := range tests { rule := trans.OrdinalPluralRule(tt.num, tt.v) if rule != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, rule) } } } func TestCardinalPlurals(t *testing.T) { trans := New() tests := []struct { num float64 v uint64 expected locales.PluralRule }{ // { // num: 1, // v: 0, // expected: locales.PluralRuleOne, // }, // { // num: 4, // v: 0, // expected: locales.PluralRuleOther, // }, } for _, tt := range tests { rule := trans.CardinalPluralRule(tt.num, tt.v) if rule != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, rule) } } } func TestDaysAbbreviated(t *testing.T) { trans := New() days := trans.WeekdaysAbbreviated() for i, day := range days { s := trans.WeekdayAbbreviated(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", day, s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "Sun", // }, // { // idx: 1, // expected: "Mon", // }, // { // idx: 2, // expected: "Tue", // }, // { // idx: 3, // expected: "Wed", // }, // { // 
idx: 4, // expected: "Thu", // }, // { // idx: 5, // expected: "Fri", // }, // { // idx: 6, // expected: "Sat", // }, } for _, tt := range tests { s := trans.WeekdayAbbreviated(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestDaysNarrow(t *testing.T) { trans := New() days := trans.WeekdaysNarrow() for i, day := range days { s := trans.WeekdayNarrow(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", string(day), s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "S", // }, // { // idx: 1, // expected: "M", // }, // { // idx: 2, // expected: "T", // }, // { // idx: 3, // expected: "W", // }, // { // idx: 4, // expected: "T", // }, // { // idx: 5, // expected: "F", // }, // { // idx: 6, // expected: "S", // }, } for _, tt := range tests { s := trans.WeekdayNarrow(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestDaysShort(t *testing.T) { trans := New() days := trans.WeekdaysShort() for i, day := range days { s := trans.WeekdayShort(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", day, s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "Su", // }, // { // idx: 1, // expected: "Mo", // }, // { // idx: 2, // expected: "Tu", // }, // { // idx: 3, // expected: "We", // }, // { // idx: 4, // expected: "Th", // }, // { // idx: 5, // expected: "Fr", // }, // { // idx: 6, // expected: "Sa", // }, } for _, tt := range tests { s := trans.WeekdayShort(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestDaysWide(t *testing.T) { trans := New() days := trans.WeekdaysWide() for i, day := range days { s := trans.WeekdayWide(time.Weekday(i)) if s != day { t.Errorf("Expected '%s' Got '%s'", day, s) } } tests := []struct { idx int expected string }{ // { // idx: 0, // expected: "Sunday", // }, // { // idx: 1, // 
expected: "Monday", // }, // { // idx: 2, // expected: "Tuesday", // }, // { // idx: 3, // expected: "Wednesday", // }, // { // idx: 4, // expected: "Thursday", // }, // { // idx: 5, // expected: "Friday", // }, // { // idx: 6, // expected: "Saturday", // }, } for _, tt := range tests { s := trans.WeekdayWide(time.Weekday(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestMonthsAbbreviated(t *testing.T) { trans := New() months := trans.MonthsAbbreviated() for i, month := range months { s := trans.MonthAbbreviated(time.Month(i + 1)) if s != month { t.Errorf("Expected '%s' Got '%s'", month, s) } } tests := []struct { idx int expected string }{ // { // idx: 1, // expected: "Jan", // }, // { // idx: 2, // expected: "Feb", // }, // { // idx: 3, // expected: "Mar", // }, // { // idx: 4, // expected: "Apr", // }, // { // idx: 5, // expected: "May", // }, // { // idx: 6, // expected: "Jun", // }, // { // idx: 7, // expected: "Jul", // }, // { // idx: 8, // expected: "Aug", // }, // { // idx: 9, // expected: "Sep", // }, // { // idx: 10, // expected: "Oct", // }, // { // idx: 11, // expected: "Nov", // }, // { // idx: 12, // expected: "Dec", // }, } for _, tt := range tests { s := trans.MonthAbbreviated(time.Month(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestMonthsNarrow(t *testing.T) { trans := New() months := trans.MonthsNarrow() for i, month := range months { s := trans.MonthNarrow(time.Month(i + 1)) if s != month { t.Errorf("Expected '%s' Got '%s'", month, s) } } tests := []struct { idx int expected string }{ // { // idx: 1, // expected: "J", // }, // { // idx: 2, // expected: "F", // }, // { // idx: 3, // expected: "M", // }, // { // idx: 4, // expected: "A", // }, // { // idx: 5, // expected: "M", // }, // { // idx: 6, // expected: "J", // }, // { // idx: 7, // expected: "J", // }, // { // idx: 8, // expected: "A", // }, // { // idx: 9, // expected: "S", // }, // { // 
idx: 10, // expected: "O", // }, // { // idx: 11, // expected: "N", // }, // { // idx: 12, // expected: "D", // }, } for _, tt := range tests { s := trans.MonthNarrow(time.Month(tt.idx)) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestMonthsWide(t *testing.T) { trans := New() months := trans.MonthsWide() for i, month := range months { s := trans.MonthWide(time.Month(i + 1)) if s != month { t.Errorf("Expected '%s' Got '%s'", month, s) } } tests := []struct { idx int expected string }{ // { // idx: 1, // expected: "January", // }, // { // idx: 2, // expected: "February", // }, // { // idx: 3, // expected: "March", // }, // { // idx: 4, // expected: "April", // }, // { // idx: 5, // expected: "May", // }, // { // idx: 6, // expected: "June", // }, // { // idx: 7, // expected: "July", // }, // { // idx: 8, // expected: "August", // }, // { // idx: 9, // expected: "September", // }, // { // idx: 10, // expected: "October", // }, // { // idx: 11, // expected: "November", // }, // { // idx: 12, // expected: "December", // }, } for _, tt := range tests { s := string(trans.MonthWide(time.Month(tt.idx))) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeFull(t *testing.T) { // loc, err := time.LoadLocation("America/Toronto") // if err != nil { // t.Errorf("Expected '<nil>' Got '%s'", err) // } // fixed := time.FixedZone("OTHER", -4) tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, loc), // expected: "9:05:01 am Eastern Standard Time", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, fixed), // expected: "8:05:01 pm OTHER", // }, } trans := New() for _, tt := range tests { s := trans.FmtTimeFull(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeLong(t *testing.T) { // loc, err := time.LoadLocation("America/Toronto") // if err != nil { // t.Errorf("Expected '<nil>' Got '%s'", err) // } 
tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, loc), // expected: "9:05:01 am EST", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, loc), // expected: "8:05:01 pm EST", // }, } trans := New() for _, tt := range tests { s := trans.FmtTimeLong(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeMedium(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, time.UTC), // expected: "9:05:01 am", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, time.UTC), // expected: "8:05:01 pm", // }, } trans := New() for _, tt := range tests { s := trans.FmtTimeMedium(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtTimeShort(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 5, 1, 0, time.UTC), // expected: "9:05 am", // }, // { // t: time.Date(2016, 02, 03, 20, 5, 1, 0, time.UTC), // expected: "8:05 pm", // }, } trans := New() for _, tt := range tests { s := trans.FmtTimeShort(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateFull(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "Wednesday, February 3, 2016", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateFull(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateLong(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "February 3, 2016", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateLong(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateMedium(t *testing.T) { tests := []struct { t time.Time expected 
string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "Feb 3, 2016", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateMedium(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtDateShort(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "2/3/16", // }, // { // t: time.Date(-500, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "2/3/500", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateShort(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtNumber(t *testing.T) { tests := []struct { num float64 v uint64 expected string }{ // { // num: 1123456.5643, // v: 2, // expected: "1,123,456.56", // }, // { // num: 1123456.5643, // v: 1, // expected: "1,123,456.6", // }, // { // num: 221123456.5643, // v: 3, // expected: "221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // expected: "-221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // expected: "-221,123,456.564", // }, // { // num: 0, // v: 2, // expected: "0.00", // }, // { // num: -0, // v: 2, // expected: "0.00", // }, // { // num: -0, // v: 2, // expected: "0.00", // }, } trans := New() for _, tt := range tests { s := trans.FmtNumber(tt.num, tt.v) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtCurrency(t *testing.T) { tests := []struct { num float64 v uint64 currency currency.Type expected string }{ // { // num: 1123456.5643, // v: 2, // currency: currency.USD, // expected: "$1,123,456.56", // }, // { // num: 1123456.5643, // v: 1, // currency: currency.USD, // expected: "$1,123,456.60", // }, // { // num: 221123456.5643, // v: 3, // currency: currency.USD, // expected: "$221,123,456.564", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.USD, // expected: "-$221,123,456.564", // }, // { // num: 
-221123456.5643, // v: 3, // currency: currency.CAD, // expected: "-CAD 221,123,456.564", // }, // { // num: 0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.CAD, // expected: "CAD 0.00", // }, // { // num: 1.23, // v: 0, // currency: currency.USD, // expected: "$1.00", // }, } trans := New() for _, tt := range tests { s := trans.FmtCurrency(tt.num, tt.v, tt.currency) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtAccounting(t *testing.T) { tests := []struct { num float64 v uint64 currency currency.Type expected string }{ // { // num: 1123456.5643, // v: 2, // currency: currency.USD, // expected: "$1,123,456.56", // }, // { // num: 1123456.5643, // v: 1, // currency: currency.USD, // expected: "$1,123,456.60", // }, // { // num: 221123456.5643,<|fim▁hole|> // { // num: -221123456.5643, // v: 3, // currency: currency.USD, // expected: "($221,123,456.564)", // }, // { // num: -221123456.5643, // v: 3, // currency: currency.CAD, // expected: "(CAD 221,123,456.564)", // }, // { // num: -0, // v: 2, // currency: currency.USD, // expected: "$0.00", // }, // { // num: -0, // v: 2, // currency: currency.CAD, // expected: "CAD 0.00", // }, // { // num: 1.23, // v: 0, // currency: currency.USD, // expected: "$1.00", // }, } trans := New() for _, tt := range tests { s := trans.FmtAccounting(tt.num, tt.v, tt.currency) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } } func TestFmtPercent(t *testing.T) { tests := []struct { num float64 v uint64 expected string }{ // { // num: 15, // v: 0, // expected: "15%", // }, // { // num: 15, // v: 2, // expected: "15.00%", // }, // { // num: 434.45, // v: 0, // expected: "434%", // }, // { // num: 34.4, // v: 2, // expected: "34.40%", // }, // { // num: -34, // v: 0, // expected: "-34%", // }, } trans := New() for _, tt := 
range tests { s := trans.FmtPercent(tt.num, tt.v) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } }<|fim▁end|>
// v: 3, // currency: currency.USD, // expected: "$221,123,456.564", // },
<|file_name|>viewport_rule.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The [`@viewport`][at] at-rule and [`meta`][meta] element. //! //! [at]: https://drafts.csswg.org/css-device-adapt/#atviewport-rule //! [meta]: https://drafts.csswg.org/css-device-adapt/#viewport-meta use app_units::Au; use context::QuirksMode; use cssparser::{parse_important, AtRuleParser, DeclarationListParser, DeclarationParser, Parser}; use cssparser::CowRcStr; use error_reporting::ContextualParseError; use euclid::TypedSize2D; use font_metrics::get_metrics_provider_for_product; use media_queries::Device; use parser::ParserContext; use properties::StyleBuilder; use rule_cache::RuleCacheConditions; use selectors::parser::SelectorParseErrorKind; use shared_lock::{SharedRwLockReadGuard, StylesheetGuards, ToCssWithGuard}; use std::borrow::Cow; use std::cell::RefCell; use std::fmt::{self, Write}; use std::iter::Enumerate; use std::str::Chars; use str::CssStringWriter; use style_traits::{CssWriter, ParseError, PinchZoomFactor, StyleParseErrorKind, ToCss}; use style_traits::viewport::{Orientation, UserZoom, ViewportConstraints, Zoom}; use stylesheets::{Origin, StylesheetInDocument}; use values::computed::{Context, ToComputedValue}; use values::specified::{LengthOrPercentageOrAuto, NoCalcLength, ViewportPercentageLength}; /// Whether parsing and processing of `@viewport` rules is enabled. #[cfg(feature = "servo")] pub fn enabled() -> bool { use servo_config::prefs::PREFS; PREFS .get("layout.viewport.enabled") .as_boolean() .unwrap_or(false) } /// Whether parsing and processing of `@viewport` rules is enabled. #[cfg(not(feature = "servo"))] pub fn enabled() -> bool { false // Gecko doesn't support @viewport. } macro_rules! 
declare_viewport_descriptor { ( $( $variant_name: expr => $variant: ident($data: ident), )+ ) => { declare_viewport_descriptor_inner!([] [ $( $variant_name => $variant($data), )+ ] 0); }; } macro_rules! declare_viewport_descriptor_inner { ( [ $( $assigned_variant_name: expr => $assigned_variant: ident($assigned_data: ident) = $assigned_discriminant: expr, )* ] [ $next_variant_name: expr => $next_variant: ident($next_data: ident), $( $variant_name: expr => $variant: ident($data: ident), )* ] $next_discriminant: expr ) => { declare_viewport_descriptor_inner! { [ $( $assigned_variant_name => $assigned_variant($assigned_data) = $assigned_discriminant, )* $next_variant_name => $next_variant($next_data) = $next_discriminant, ] [ $( $variant_name => $variant($data), )* ] $next_discriminant + 1 } }; ( [ $( $assigned_variant_name: expr => $assigned_variant: ident($assigned_data: ident) = $assigned_discriminant: expr, )* ] [ ] $number_of_variants: expr ) => { #[derive(Clone, Debug, PartialEq)] #[cfg_attr(feature = "servo", derive(MallocSizeOf))] #[allow(missing_docs)] pub enum ViewportDescriptor { $( $assigned_variant($assigned_data), )+ } const VIEWPORT_DESCRIPTOR_VARIANTS: usize = $number_of_variants; impl ViewportDescriptor { #[allow(missing_docs)] pub fn discriminant_value(&self) -> usize { match *self { $( ViewportDescriptor::$assigned_variant(..) => $assigned_discriminant, )* } } } impl ToCss for ViewportDescriptor { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write, { match *self { $( ViewportDescriptor::$assigned_variant(ref val) => { dest.write_str($assigned_variant_name)?; dest.write_str(": ")?; val.to_css(dest)?; }, )* } dest.write_str(";") } } }; } declare_viewport_descriptor! 
{ "min-width" => MinWidth(ViewportLength), "max-width" => MaxWidth(ViewportLength), "min-height" => MinHeight(ViewportLength), "max-height" => MaxHeight(ViewportLength), "zoom" => Zoom(Zoom), "min-zoom" => MinZoom(Zoom), "max-zoom" => MaxZoom(Zoom), "user-zoom" => UserZoom(UserZoom), "orientation" => Orientation(Orientation), } trait FromMeta: Sized { fn from_meta(value: &str) -> Option<Self>; } /// ViewportLength is a length | percentage | auto | extend-to-zoom /// See: /// * http://dev.w3.org/csswg/css-device-adapt/#min-max-width-desc /// * http://dev.w3.org/csswg/css-device-adapt/#extend-to-zoom #[allow(missing_docs)] #[cfg_attr(feature = "servo", derive(MallocSizeOf))] #[derive(Clone, Debug, PartialEq, ToCss)] pub enum ViewportLength { Specified(LengthOrPercentageOrAuto), ExtendToZoom, } impl FromMeta for ViewportLength { fn from_meta(value: &str) -> Option<ViewportLength> { macro_rules! specified { ($value:expr) => { ViewportLength::Specified(LengthOrPercentageOrAuto::Length($value)) }; } Some(match value { v if v.eq_ignore_ascii_case("device-width") => specified!( NoCalcLength::ViewportPercentage(ViewportPercentageLength::Vw(100.)) ), v if v.eq_ignore_ascii_case("device-height") => specified!( NoCalcLength::ViewportPercentage(ViewportPercentageLength::Vh(100.)) ), _ => match value.parse::<f32>() { Ok(n) if n >= 0. 
=> specified!(NoCalcLength::from_px(n.max(1.).min(10000.))), Ok(_) => return None, Err(_) => specified!(NoCalcLength::from_px(1.)), }, }) } } impl ViewportLength { fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Self, ParseError<'i>> { // we explicitly do not accept 'extend-to-zoom', since it is a UA // internal value for <META> viewport translation LengthOrPercentageOrAuto::parse_non_negative(context, input).map(ViewportLength::Specified) } } impl FromMeta for Zoom { fn from_meta(value: &str) -> Option<Zoom> { Some(match value { v if v.eq_ignore_ascii_case("yes") => Zoom::Number(1.), v if v.eq_ignore_ascii_case("no") => Zoom::Number(0.1), v if v.eq_ignore_ascii_case("device-width") => Zoom::Number(10.), v if v.eq_ignore_ascii_case("device-height") => Zoom::Number(10.), _ => match value.parse::<f32>() { Ok(n) if n >= 0. => Zoom::Number(n.max(0.1).min(10.)), Ok(_) => return None, Err(_) => Zoom::Number(0.1), }, }) } } impl FromMeta for UserZoom { fn from_meta(value: &str) -> Option<UserZoom> { Some(match value { v if v.eq_ignore_ascii_case("yes") => UserZoom::Zoom, v if v.eq_ignore_ascii_case("no") => UserZoom::Fixed, v if v.eq_ignore_ascii_case("device-width") => UserZoom::Zoom, v if v.eq_ignore_ascii_case("device-height") => UserZoom::Zoom, _ => match value.parse::<f32>() { Ok(n) if n >= 1. || n <= -1. 
=> UserZoom::Zoom, _ => UserZoom::Fixed, }, }) } } struct ViewportRuleParser<'a, 'b: 'a> { context: &'a ParserContext<'b>, } #[derive(Clone, Debug, PartialEq)] #[cfg_attr(feature = "servo", derive(MallocSizeOf))] #[allow(missing_docs)] pub struct ViewportDescriptorDeclaration { pub origin: Origin, pub descriptor: ViewportDescriptor, pub important: bool, } impl ViewportDescriptorDeclaration { #[allow(missing_docs)] pub fn new( origin: Origin, descriptor: ViewportDescriptor, important: bool, ) -> ViewportDescriptorDeclaration { ViewportDescriptorDeclaration { origin: origin, descriptor: descriptor, important: important, } } } impl ToCss for ViewportDescriptorDeclaration { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write, { self.descriptor.to_css(dest)?; if self.important { dest.write_str(" !important")?; } dest.write_str(";") } } fn parse_shorthand<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<(ViewportLength, ViewportLength), ParseError<'i>> { let min = ViewportLength::parse(context, input)?; match input.try(|i| ViewportLength::parse(context, i)) { Err(_) => Ok((min.clone(), min)), Ok(max) => Ok((min, max)), } } impl<'a, 'b, 'i> AtRuleParser<'i> for ViewportRuleParser<'a, 'b> { type PreludeNoBlock = (); type PreludeBlock = (); type AtRule = Vec<ViewportDescriptorDeclaration>; type Error = StyleParseErrorKind<'i>; } impl<'a, 'b, 'i> DeclarationParser<'i> for ViewportRuleParser<'a, 'b> { type Declaration = Vec<ViewportDescriptorDeclaration>; type Error = StyleParseErrorKind<'i>; fn parse_value<'t>( &mut self, name: CowRcStr<'i>, input: &mut Parser<'i, 't>, ) -> Result<Vec<ViewportDescriptorDeclaration>, ParseError<'i>> { macro_rules! 
declaration { ($declaration:ident($parse:expr)) => { declaration!($declaration(value: try!($parse(input)), important: input.try(parse_important).is_ok())) }; ($declaration:ident(value: $value:expr, important: $important:expr)) => { ViewportDescriptorDeclaration::new( self.context.stylesheet_origin, ViewportDescriptor::$declaration($value), $important) } } macro_rules! ok { ($declaration:ident($parse:expr)) => { Ok(vec![declaration!($declaration($parse))]) }; (shorthand -> [$min:ident, $max:ident]) => {{ let shorthand = parse_shorthand(self.context, input)?; let important = input.try(parse_important).is_ok(); Ok(vec![ declaration!($min(value: shorthand.0, important: important)), declaration!($max(value: shorthand.1, important: important)), ]) }}; } match_ignore_ascii_case! { &*name, "min-width" => ok!(MinWidth(|i| ViewportLength::parse(self.context, i))), "max-width" => ok!(MaxWidth(|i| ViewportLength::parse(self.context, i))), "width" => ok!(shorthand -> [MinWidth, MaxWidth]), "min-height" => ok!(MinHeight(|i| ViewportLength::parse(self.context, i))), "max-height" => ok!(MaxHeight(|i| ViewportLength::parse(self.context, i))), "height" => ok!(shorthand -> [MinHeight, MaxHeight]), "zoom" => ok!(Zoom(Zoom::parse)), "min-zoom" => ok!(MinZoom(Zoom::parse)), "max-zoom" => ok!(MaxZoom(Zoom::parse)), "user-zoom" => ok!(UserZoom(UserZoom::parse)), "orientation" => ok!(Orientation(Orientation::parse)), _ => Err(input.new_custom_error(SelectorParseErrorKind::UnexpectedIdent(name.clone()))), } } } /// A `@viewport` rule. #[derive(Clone, Debug, PartialEq)] #[cfg_attr(feature = "servo", derive(MallocSizeOf))] pub struct ViewportRule { /// The declarations contained in this @viewport rule. pub declarations: Vec<ViewportDescriptorDeclaration>, } /// Whitespace as defined by DEVICE-ADAPT § 9.2 // TODO: should we just use whitespace as defined by HTML5? 
const WHITESPACE: &'static [char] = &['\t', '\n', '\r', ' ']; /// Separators as defined by DEVICE-ADAPT § 9.2 // need to use \x2c instead of ',' due to test-tidy const SEPARATOR: &'static [char] = &['\x2c', ';']; #[inline] fn is_whitespace_separator_or_equals(c: &char) -> bool { WHITESPACE.contains(c) || SEPARATOR.contains(c) || *c == '=' } impl ViewportRule { /// Parse a single @viewport rule. /// /// TODO(emilio): This could use the `Parse` trait now. pub fn parse<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Self, ParseError<'i>> { let parser = ViewportRuleParser { context }; let mut cascade = Cascade::new(); let mut parser = DeclarationListParser::new(input, parser); while let Some(result) = parser.next() { match result { Ok(declarations) => { for declarations in declarations { cascade.add(Cow::Owned(declarations)) } }, Err((error, slice)) => { let location = error.location; let error = ContextualParseError::UnsupportedViewportDescriptorDeclaration( slice, error, ); context.log_css_error(location, error); }, } } Ok(ViewportRule { declarations: cascade.finish(), }) } } impl ViewportRule { #[allow(missing_docs)] pub fn from_meta(content: &str) -> Option<ViewportRule> { let mut declarations = vec![None; VIEWPORT_DESCRIPTOR_VARIANTS]; macro_rules! push_descriptor { ($descriptor:ident($value:expr)) => {{ let descriptor = ViewportDescriptor::$descriptor($value); let discriminant = descriptor.discriminant_value(); declarations[discriminant] = Some(ViewportDescriptorDeclaration::new( Origin::Author, descriptor, false, )); }}; } let mut has_width = false; let mut has_height = false; let mut has_zoom = false; let mut iter = content.chars().enumerate(); macro_rules! 
start_of_name { ($iter:ident) => { $iter .by_ref() .skip_while(|&(_, c)| is_whitespace_separator_or_equals(&c)) .next() }; } while let Some((start, _)) = start_of_name!(iter) { let property = ViewportRule::parse_meta_property(content, &mut iter, start); if let Some((name, value)) = property { macro_rules! push { ($descriptor:ident($translate:path)) => { if let Some(value) = $translate(value) { push_descriptor!($descriptor(value)); } }; } match name { n if n.eq_ignore_ascii_case("width") => { if let Some(value) = ViewportLength::from_meta(value) { push_descriptor!(MinWidth(ViewportLength::ExtendToZoom)); push_descriptor!(MaxWidth(value)); has_width = true; } }, n if n.eq_ignore_ascii_case("height") => { if let Some(value) = ViewportLength::from_meta(value) { push_descriptor!(MinHeight(ViewportLength::ExtendToZoom)); push_descriptor!(MaxHeight(value)); has_height = true; } }, n if n.eq_ignore_ascii_case("initial-scale") => { if let Some(value) = Zoom::from_meta(value) { push_descriptor!(Zoom(value)); has_zoom = true; } }, n if n.eq_ignore_ascii_case("minimum-scale") => push!(MinZoom(Zoom::from_meta)), n if n.eq_ignore_ascii_case("maximum-scale") => push!(MaxZoom(Zoom::from_meta)), n if n.eq_ignore_ascii_case("user-scalable") => { push!(UserZoom(UserZoom::from_meta)) }, _ => {}, } } } // DEVICE-ADAPT § 9.4 - The 'width' and 'height' properties // http://dev.w3.org/csswg/css-device-adapt/#width-and-height-properties if !has_width && has_zoom { if has_height { push_descriptor!(MinWidth(ViewportLength::Specified( LengthOrPercentageOrAuto::Auto ))); push_descriptor!(MaxWidth(ViewportLength::Specified( LengthOrPercentageOrAuto::Auto ))); } else { push_descriptor!(MinWidth(ViewportLength::ExtendToZoom)); push_descriptor!(MaxWidth(ViewportLength::ExtendToZoom)); } } let declarations: Vec<_> = declarations.into_iter().filter_map(|entry| entry).collect(); if !declarations.is_empty() { Some(ViewportRule { declarations: declarations, }) } else { None } } fn 
parse_meta_property<'a>( content: &'a str, iter: &mut Enumerate<Chars<'a>>, start: usize, ) -> Option<(&'a str, &'a str)> { fn end_of_token(iter: &mut Enumerate<Chars>) -> Option<(usize, char)> { iter.by_ref() .skip_while(|&(_, c)| !is_whitespace_separator_or_equals(&c)) .next() } fn skip_whitespace(iter: &mut Enumerate<Chars>) -> Option<(usize, char)> { iter.by_ref() .skip_while(|&(_, c)| WHITESPACE.contains(&c)) .next() } // <name> <whitespace>* '=' let end = match end_of_token(iter) { Some((end, c)) if WHITESPACE.contains(&c) => match skip_whitespace(iter) { Some((_, c)) if c == '=' => end, _ => return None, }, Some((end, c)) if c == '=' => end, _ => return None, }; let name = &content[start..end]; // <whitespace>* <value> let start = match skip_whitespace(iter) { Some((start, c)) if !SEPARATOR.contains(&c) => start, _ => return None, }; let value = match end_of_token(iter) { Some((end, _)) => &content[start..end], _ => &content[start..], }; Some((name, value)) } } impl ToCssWithGuard for ViewportRule { // Serialization of ViewportRule is not specced. 
fn to_css(&self, _guard: &SharedRwLockReadGuard, dest: &mut CssStringWriter) -> fmt::Result { dest.write_str("@viewport { ")?; let mut iter = self.declarations.iter();<|fim▁hole|> dest.write_str(" ")?; declaration.to_css(&mut CssWriter::new(dest))?; } dest.write_str(" }") } } /// Computes the cascade precedence as according to /// <http://dev.w3.org/csswg/css-cascade/#cascade-origin> fn cascade_precendence(origin: Origin, important: bool) -> u8 { match (origin, important) { (Origin::UserAgent, true) => 1, (Origin::User, true) => 2, (Origin::Author, true) => 3, (Origin::Author, false) => 4, (Origin::User, false) => 5, (Origin::UserAgent, false) => 6, } } impl ViewportDescriptorDeclaration { fn higher_or_equal_precendence(&self, other: &ViewportDescriptorDeclaration) -> bool { let self_precedence = cascade_precendence(self.origin, self.important); let other_precedence = cascade_precendence(other.origin, other.important); self_precedence <= other_precedence } } #[allow(missing_docs)] pub struct Cascade { declarations: Vec<Option<(usize, ViewportDescriptorDeclaration)>>, count_so_far: usize, } #[allow(missing_docs)] impl Cascade { pub fn new() -> Self { Cascade { declarations: vec![None; VIEWPORT_DESCRIPTOR_VARIANTS], count_so_far: 0, } } pub fn from_stylesheets<'a, I, S>( stylesheets: I, guards: &StylesheetGuards, device: &Device, ) -> Self where I: Iterator<Item = (&'a S, Origin)>, S: StylesheetInDocument + 'static, { let mut cascade = Self::new(); for (stylesheet, origin) in stylesheets { stylesheet.effective_viewport_rules(device, guards.for_origin(origin), |rule| { for declaration in &rule.declarations { cascade.add(Cow::Borrowed(declaration)) } }) } cascade } pub fn add(&mut self, declaration: Cow<ViewportDescriptorDeclaration>) { let descriptor = declaration.descriptor.discriminant_value(); match self.declarations[descriptor] { Some((ref mut order_of_appearance, ref mut entry_declaration)) => { if declaration.higher_or_equal_precendence(entry_declaration) { 
*entry_declaration = declaration.into_owned(); *order_of_appearance = self.count_so_far; } }, ref mut entry @ None => { *entry = Some((self.count_so_far, declaration.into_owned())); }, } self.count_so_far += 1; } pub fn finish(mut self) -> Vec<ViewportDescriptorDeclaration> { // sort the descriptors by order of appearance self.declarations .sort_by_key(|entry| entry.as_ref().map(|&(index, _)| index)); self.declarations .into_iter() .filter_map(|entry| entry.map(|(_, decl)| decl)) .collect() } } /// Just a helper trait to be able to implement methods on ViewportConstraints. pub trait MaybeNew { /// Create a ViewportConstraints from a viewport size and a `@viewport` /// rule. fn maybe_new( device: &Device, rule: &ViewportRule, quirks_mode: QuirksMode, ) -> Option<ViewportConstraints>; } impl MaybeNew for ViewportConstraints { fn maybe_new( device: &Device, rule: &ViewportRule, quirks_mode: QuirksMode, ) -> Option<ViewportConstraints> { use std::cmp; if rule.declarations.is_empty() { return None; } let mut min_width = None; let mut max_width = None; let mut min_height = None; let mut max_height = None; let mut initial_zoom = None; let mut min_zoom = None; let mut max_zoom = None; let mut user_zoom = UserZoom::Zoom; let mut orientation = Orientation::Auto; // collapse the list of declarations into descriptor values for declaration in &rule.declarations { match declaration.descriptor { ViewportDescriptor::MinWidth(ref value) => min_width = Some(value), ViewportDescriptor::MaxWidth(ref value) => max_width = Some(value), ViewportDescriptor::MinHeight(ref value) => min_height = Some(value), ViewportDescriptor::MaxHeight(ref value) => max_height = Some(value), ViewportDescriptor::Zoom(value) => initial_zoom = value.to_f32(), ViewportDescriptor::MinZoom(value) => min_zoom = value.to_f32(), ViewportDescriptor::MaxZoom(value) => max_zoom = value.to_f32(), ViewportDescriptor::UserZoom(value) => user_zoom = value, ViewportDescriptor::Orientation(value) => orientation = value, } 
} // TODO: return `None` if all descriptors are either absent or initial value macro_rules! choose { ($op:ident, $opta:expr, $optb:expr) => { match ($opta, $optb) { (None, None) => None, (a, None) => a, (None, b) => b, (Some(a), Some(b)) => Some(a.$op(b)), } }; } macro_rules! min { ($opta:expr, $optb:expr) => { choose!(min, $opta, $optb) }; } macro_rules! max { ($opta:expr, $optb:expr) => { choose!(max, $opta, $optb) }; } // DEVICE-ADAPT § 6.2.1 Resolve min-zoom and max-zoom values if min_zoom.is_some() && max_zoom.is_some() { max_zoom = Some(min_zoom.unwrap().max(max_zoom.unwrap())) } // DEVICE-ADAPT § 6.2.2 Constrain zoom value to the [min-zoom, max-zoom] range if initial_zoom.is_some() { initial_zoom = max!(min_zoom, min!(max_zoom, initial_zoom)); } // DEVICE-ADAPT § 6.2.3 Resolve non-auto lengths to pixel lengths let initial_viewport = device.au_viewport_size(); let provider = get_metrics_provider_for_product(); let mut conditions = RuleCacheConditions::default(); let context = Context { is_root_element: false, // Note: DEVICE-ADAPT § 5. states that relative length values are // resolved against initial values builder: StyleBuilder::for_inheritance(device, None, None), font_metrics_provider: &provider, cached_system_font: None, in_media_query: false, quirks_mode: quirks_mode, for_smil_animation: false, for_non_inherited_property: None, rule_cache_conditions: RefCell::new(&mut conditions), }; // DEVICE-ADAPT § 9.3 Resolving 'extend-to-zoom' let extend_width; let extend_height; if let Some(extend_zoom) = max!(initial_zoom, max_zoom) { let scale_factor = 1. / extend_zoom; extend_width = Some(initial_viewport.width.scale_by(scale_factor)); extend_height = Some(initial_viewport.height.scale_by(scale_factor)); } else { extend_width = None; extend_height = None; } macro_rules! 
to_pixel_length { ($value:ident, $dimension:ident, $extend_to:ident => $auto_extend_to:expr) => { if let Some($value) = $value { match *$value { ViewportLength::Specified(ref length) => match *length { LengthOrPercentageOrAuto::Length(ref value) => { Some(Au::from(value.to_computed_value(&context))) }, LengthOrPercentageOrAuto::Percentage(value) => { Some(initial_viewport.$dimension.scale_by(value.0)) }, LengthOrPercentageOrAuto::Auto => None, LengthOrPercentageOrAuto::Calc(ref calc) => calc.to_computed_value( &context, ).to_used_value(Some(initial_viewport.$dimension)), }, ViewportLength::ExtendToZoom => { // $extend_to will be 'None' if 'extend-to-zoom' is 'auto' match ($extend_to, $auto_extend_to) { (None, None) => None, (a, None) => a, (None, b) => b, (a, b) => cmp::max(a, b), } }, } } else { None } }; } // DEVICE-ADAPT § 9.3 states that max-descriptors need to be resolved // before min-descriptors. // http://dev.w3.org/csswg/css-device-adapt/#resolve-extend-to-zoom let max_width = to_pixel_length!(max_width, width, extend_width => None); let max_height = to_pixel_length!(max_height, height, extend_height => None); let min_width = to_pixel_length!(min_width, width, extend_width => max_width); let min_height = to_pixel_length!(min_height, height, extend_height => max_height); // DEVICE-ADAPT § 6.2.4 Resolve initial width and height from min/max descriptors macro_rules! 
resolve { ($min:ident, $max:ident, $initial:expr) => { if $min.is_some() || $max.is_some() { let max = match $max { Some(max) => cmp::min(max, $initial), None => $initial, }; Some(match $min { Some(min) => cmp::max(min, max), None => max, }) } else { None }; }; } let width = resolve!(min_width, max_width, initial_viewport.width); let height = resolve!(min_height, max_height, initial_viewport.height); // DEVICE-ADAPT § 6.2.5 Resolve width value let width = if width.is_none() && height.is_none() { Some(initial_viewport.width) } else { width }; let width = width.unwrap_or_else(|| match initial_viewport.height { Au(0) => initial_viewport.width, initial_height => { let ratio = initial_viewport.width.to_f32_px() / initial_height.to_f32_px(); Au::from_f32_px(height.unwrap().to_f32_px() * ratio) }, }); // DEVICE-ADAPT § 6.2.6 Resolve height value let height = height.unwrap_or_else(|| match initial_viewport.width { Au(0) => initial_viewport.height, initial_width => { let ratio = initial_viewport.height.to_f32_px() / initial_width.to_f32_px(); Au::from_f32_px(width.to_f32_px() * ratio) }, }); Some(ViewportConstraints { size: TypedSize2D::new(width.to_f32_px(), height.to_f32_px()), // TODO: compute a zoom factor for 'auto' as suggested by DEVICE-ADAPT § 10. initial_zoom: PinchZoomFactor::new(initial_zoom.unwrap_or(1.)), min_zoom: min_zoom.map(PinchZoomFactor::new), max_zoom: max_zoom.map(PinchZoomFactor::new), user_zoom: user_zoom, orientation: orientation, }) } }<|fim▁end|>
iter.next().unwrap().to_css(&mut CssWriter::new(dest))?; for declaration in iter {
<|file_name|>quartet_sampling.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim:fileencoding=utf-8 """ quartet_samping.py: Quartet Sampling method for phylogenetic branch support evaluation <http://www.github.com/FePhyFoFum/quartetsampling> """ import argparse import os import sys import time from multiprocessing import Manager, Pool from shutil import copyfile from tree_data import TreeData, write_test_trees from rep_data import DataStore from rep_data import process_replicate_raxml, process_replicate_raxml_lrt from rep_data import process_replicate_raxmlng, process_replicate_raxmlng_lrt from rep_data import process_replicate_iqtree, process_replicate_iqtree_lrt from rep_data import process_replicate_paup from rep_data import get_replicates_exhaustive, get_replicates_random from rep_data import write_run_stats from paramset import ParamSet, read_config from alignment import Alignment LICENSE = """from rep_data import This file is part of 'quartetsampling'. 'quartetsampling' is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 'quartetsampling' is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with 'quartetsampling'. If not, see <http://www.gnu.org/licenses/>. 
""" def generate_argparser(): parser = argparse.ArgumentParser( prog="quartet_sampling.py", description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter, epilog=LICENSE) parser.add_argument("--tree", type=open, nargs=1, required=True, # Prev -t help=("The input tree in Newick " "(parenthetical) format.")) parser.add_argument("--align", "--alignment", type=open, nargs=1, # Prev -a required=True, dest="align", help=("Alignment file in \"relaxed phylip\" format, " "as used by RAxML.")) parser.add_argument("--reps", "--number-of-reps", type=int, nargs=1, # Prev -N required=True, default=100, dest="reps", help=("The number of replicate quartet topology " "searches to be performed at each node.")) parser.add_argument("--threads", "--number-of-threads", type=int, nargs=1, # Prev -T required=True, default=1, dest="threads", help=("The number of parallel threads to be used " "by Python for quartet topology searches.")) parser.add_argument("--lnlike", "--lnlike-thresh", type=float, nargs=1, # Prev -L default=2.0, dest="lnlike", help=("The lnlike threshhold that is the minimum " "value by which the log-likelihood value " "of the best-likelihood tree must be " "higher than the second-best-likelihood tree " "for the replicate to register as the " "best-likelihood topology rather than " "'uncertain'. 
If set to zero, this turns off " "likelihood evaluation mode and invokes tree " "inference mode where a tree is simply inferred " "from the alignment without considering " "likelihood (QI values are N/A in this case).")) parser.add_argument("--result-prefix", type=str, nargs=1, # Prev -r help="A prefix to put on the result files.") parser.add_argument("--data-type", choices=('nuc', 'amino', 'cat'), # Prev -d default=["nuc"], nargs=1, help=("(nuc)leotide, (amino) acid, " "or (cat)egorical data")) parser.add_argument("--min-overlap", type=int, # Prev -O help=("The minimum sites required to be sampled for " "all taxa in a given quartet.")) parser.add_argument("--results-dir", type=os.path.abspath, nargs=1, # Prev -o help=("A directory to which output files will " "be saved. If not supplied, the current working " "directory will be used. (default is current " "folder).")) parser.add_argument("--verbout", action="store_true", # Prev -V help=("Provide output of the frequencies of each " "topology and QC.")) parser.add_argument("--partitions", type=os.path.abspath, nargs=1, # Prev -q help=("Partitions file in RAxML format. If omitted " "then the entire alignment will be treated " "as one partition for all quartet replicate " "topology searches.")) parser.add_argument("--genetrees", type=os.path.abspath, nargs=1, # Prev -g help=("Use partitions file (RAxML format) to divide " "the alignment into separate gene tree regions. " "Gene alignments will be sampled random for the " "quartet topology searches.")) parser.add_argument("--temp-dir", type=os.path.abspath, nargs=1, # Prev -e help=("A directory to which temporary files will be " "saved. If not supplied, 'QuartetSampling' " "will be created in the current " "working directory. " "When specifying a custom temporary output " "the characters 'QuartetSampling' must appear " "in the directory name to prevent accidental " "file deletion. 
(default='./QuartetSampling'")) parser.add_argument("--retain-temp", action="store_true", help=("Do not remove temporary files")) parser.add_argument("--clade", type=str, # Prev: -C help=("Conduct analysis on specific clade identified " "by CSV taxon list")) parser.add_argument("--start-node-number", type=int, nargs=1, # Prev -s help=("An integer denoting the node to which to start " "from. Nodes will be read from topologically " "identical (and isomorphic!) input trees in " "deterministic order, so this argument may be " "used to restart at an intermediate position " "(in case the previous run was canceled before " "completion, for example).")) parser.add_argument("--stop-node-number", type=int, nargs=1, # Prev -p help=("An integer denoting the node at which to stop. " "Will include nodes with indices <= the stop " "node number. This argument may be used to " "limit the length of a given run in case only " "a certain part of the tree is of interest. " "Nodes will be read from topologically " "identical (and isomorphic!) input trees " "in deterministic order.")) parser.add_argument("--engine", nargs=1, default=('raxml-ng',), choices=('raxml-ng', 'raxml', 'paup', 'iqtree'), help=("Name of the program to use to infer trees or" " evaluate tree model likelihoods.")) parser.add_argument("--engine-exec", nargs=1, help=("Full file path of the tree inference or" " likelihood evaluation engine.")) parser.add_argument("--engine-model", nargs=1, help=("Advanced: specify a custom model name " "for the tree engine")) # parser.add_argument("--raxml-model", nargs=1, # help=("Advanced: specify a custom RAxML model name " # "for the raxml '-m' parameter")) # parser.add_argument("-X", "--raxml-executable", nargs=1, # help=("The name (or absolute path) of the raxml " # "executable to be used for calculating " # "likelihoods on quartet topologies." 
# "(default='raxml')")) # parser.add_argument("--raxml-model", nargs=1, # help=("Advanced: specify a custom RAxML model name " # "for the raxml '-m' parameter")) # parser.add_argument("-P", "--paup", action="store_true", # help="Use PAUP instead of RAxML.") # parser.add_argument("--paup-executable", nargs=1, default=["paup"], # help=("The name or path of the PAUP executable to " # "be used for calculated quartets.")) parser.add_argument("--ignore-errors", action="store_true", help=("Ignore RAxML and PAUP erroneous runs")) parser.add_argument("--low-mem", action="store_true", help=("Do not store large alignment in memory " "for whole-alignment (non-genetree) mode")) parser.add_argument('--max-random-sample-proportion', type=float, help=("The proportion of possible replicates explored " "unsuccessfully by the random generation " "procedure before it gives up. Because this " "generates random replicates, it takes " "progressively longer as it proceeds. To avoid " "long runtimes, the recommended range is < 0.5 " "(which is the default).")) parser.add_argument("--calc-qdstats", action="store_true", help=("EXPERIMENTAL: Calculates Chi-square test " "for QD tree frequencies. Use only " " if Scipy is available. 
" "Will increase running time.")) parser.add_argument("--verbose", action="store_true", help="Provide more verbose output if specified.") parser.add_argument('--version', action='version', version='%(prog)s version 1.3.1.b') return parser def main(arguments=None): """Main method for quartet_sampling""" if arguments is None: if (len(sys.argv) == 2 and sys.argv[1] not in ('-h', '--help', '--version')): arguments = read_config(sys.argv[1]) print("Config file used.") print("Executing with arguments: ", " ".join(arguments)) else: arguments = sys.argv[1:] parser = generate_argparser() args = parser.parse_args(arguments) treedata = TreeData(args) params = ParamSet() params.setup(args, treedata.nleaves) if args.verbose: print("-----------") print("PARAMETERS:") print(params) print("-----------") maindata = DataStore(params) # shared object access for multithreading manager = Manager() lock = manager.RLock() aln = Alignment(params) if params['using_genetrees']: aln.read_genes(args.align[0], params) else: aln.read_align(args.align[0], params) params['min_overlap'] = aln.min_overlap # k is the node counter k = 1 # if we are starting at the beginning, initialize the results file # (otherwise assume it's already there and don't overwrite it) if not params['startk'] > k: maindata.write_headers(params['score_result_file_path']) maindata.write_headers(params['nodecounts_result_file_path'], restype="nodecounts", delim='\t') # process the nodes in the tree params['starttime'] = time.time() for fnode in treedata.tree.iternodes(): if params['verbose'] is True: print("testing node", [x.label for x in fnode.leaves()]) if treedata.clade is not None: if fnode is not treedata.clade: continue os.chdir(params['temp_wd']) if k > params['stopk']: print("Processed all nodes up to the stop node. 
Exiting...") break write_test_trees() # skip tips and root k, leafsets = treedata.check_node(fnode, k, params) if leafsets is False: if params['verbose'] is True: print("skipping node...") continue # Begin multiprocessing queue results_queue = manager.Queue() n_completed = manager.Value("i", 0, "lock") # Establish replicates n_possible_replicates = 1 for leafset in leafsets.values(): n_possible_replicates *= len(leafset) if params['using_genetrees']: n_possible_replicates *= len(aln.seqs) if params['verbose'] is True: print('number of possible gene-quartet combos: {}'.format( n_possible_replicates)) elif params['verbose'] is True: print('number of possible quartets: {}'.format( n_possible_replicates)) if (n_possible_replicates * params['max_quartet_enumeration_threshold'] < params['nreps']): if params['verbose'] is True: print('Number of possible quartets is close enough to the ' 'total number to be sampled, so will generate all ' 'and do a random draw') replicates, repstats = get_replicates_exhaustive( n_completed, results_queue, leafsets, params, aln, fnode, lock) else: if params['verbose']: print('Generating random quartets...') replicates, repstats = get_replicates_random( n_completed, results_queue, leafsets, params, aln, fnode, lock) nreplicates = len(replicates) if nreplicates < 1: # no suitable replicates maindata.process_empty_rep_results(fnode, params, nreplicates) else: # copy original partitions file, should not change throughout run if params['partitions_file_path'] is not None: copyfile(params['partitions_file_path'], "temp_parts") # run the raxml calls in parallel # now designate multiprocessing resource pool. # important to do outside node loop. garbage collecting does not # apply to threads! 
set maxtasksperchild to release mem and files pool = Pool(params['nprocs'], maxtasksperchild=1) # PAUP Case if params['engine'] == 'paup': pool.map(process_replicate_paup, replicates) # IQ-TREE with likelihood threshold elif params['lnlikethresh'] > 0 and params['engine'] == 'iqtree': pool.map(process_replicate_iqtree_lrt, replicates) # IQ-TREE with likelihood threshold elif params['engine'] == 'iqtree': pool.map(process_replicate_iqtree, replicates) # RAxML Classic with likelihood threshold elif params['lnlikethresh'] > 0 and params['engine'] == 'raxml': pool.map(process_replicate_raxml_lrt, replicates) # RAxML Classic without likelihood threshold elif params['engine'] == 'raxml': pool.map(process_replicate_raxml, replicates) # RAxML-ng with likelihood threshold elif params['lnlikethresh'] > 0: pool.map(process_replicate_raxmlng_lrt, replicates) # RAxML-ng without likelihood threshold else: pool.map(process_replicate_raxmlng, replicates) pool.close()<|fim▁hole|> # now process the results. 
first open a file to hold topologies # sending params['just_clade'] = True will give back detailed # name results maindata.process_rep_results(fnode, results_queue, params, nreplicates) # , leafsets) # clean up del results_queue del n_completed # break # Left in place for troubleshooting if params['retain_temp'] is False: for the_file in os.listdir(params['temp_wd']): file_path = os.path.join(params['temp_wd'], the_file) try: if os.path.isfile(file_path): if "QuartetSampling" not in file_path: print(file_path, " does not contain 'QuartetSampling' " "and will not be deleted for safety") else: os.remove(file_path) except FileNotFoundError as exc: print(file_path, " not found") if 'QuartetSampling' in params['temp_wd']: os.rmdir(params['temp_wd']) qf_scores = maindata.write_qf_scores(params["score_result_file_path"]) treedata.write_figtree(params['figtree_file_path'], qf_scores) treedata.write_scoretrees(params) write_run_stats(repstats, params) print(("\ndone.\nscores written to: {}\nlabeled " "tree written to: {}\ntotal time {:.2f} hours").format( params['score_result_file_path'], params['tree_result_file_path'], (time.time() - params['starttime']) / 3600)) return '' if __name__ == "__main__": main()<|fim▁end|>
pool.join() del pool # print("")
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! FFI bindings for `energymon-osp-polling.h`. extern crate libc; extern crate energymon_sys; pub use energymon_sys::energymon; use libc::{c_int, uint64_t, c_char, size_t}; extern "C" { pub fn energymon_init_osp_polling(em: *mut energymon) -> c_int; <|fim▁hole|> pub fn energymon_finish_osp_polling(em: *mut energymon) -> c_int; pub fn energymon_get_source_osp_polling(buffer: *mut c_char, n: size_t) -> *mut c_char; pub fn energymon_get_interval_osp_polling(em: *const energymon) -> uint64_t; pub fn energymon_get_precision_osp_polling(em: *const energymon) -> uint64_t; pub fn energymon_is_exclusive_osp_polling() -> c_int; pub fn energymon_get_osp_polling(em: *mut energymon) -> c_int; }<|fim▁end|>
pub fn energymon_read_total_osp_polling(em: *const energymon) -> uint64_t;
<|file_name|>0024_auto_20160325_1916.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*- from __future__ import unicode_literals import django.utils.timezone from django.db import migrations from django.db import models class Migration(migrations.Migration): dependencies = [ ('orchestra', '0023_assignment_failed'), ] operations = [ migrations.AddField( model_name='certification', name='created_at', field=models.DateTimeField(default=django.utils.timezone.now), ), migrations.AddField( model_name='step', name='created_at', field=models.DateTimeField(default=django.utils.timezone.now), ), migrations.AddField( model_name='workercertification', name='created_at', field=models.DateTimeField(default=django.utils.timezone.now), ), migrations.AddField( model_name='workflow', name='created_at', field=models.DateTimeField(default=django.utils.timezone.now), ), migrations.AddField( model_name='workflowversion', name='created_at', field=models.DateTimeField(default=django.utils.timezone.now), ), ]<|fim▁end|>
<|file_name|>DoubleList.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2016 QAware GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.qaware.chronix.timeseries.dt; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import java.io.Serializable; import java.util.Arrays; import static de.qaware.chronix.timeseries.dt.ListUtil.*; /** * Implementation of a list with primitive doubles. * * @author f.lautenschlager */ public class DoubleList implements Serializable { private static final long serialVersionUID = -1275724597860546074L; /** * Shared empty array instance used for empty instances. */ private static final double[] EMPTY_ELEMENT_DATA = {}; /** * Shared empty array instance used for default sized empty instances. We * distinguish this from EMPTY_ELEMENT_DATA to know how much to inflate when * first element is added. */ private static final double[] DEFAULT_CAPACITY_EMPTY_ELEMENT_DATA = {}; private double[] doubles; private int size; /** * Constructs an empty list with the specified initial capacity. 
* * @param initialCapacity the initial capacity of the list * @throws IllegalArgumentException if the specified initial capacity * is negative */ public DoubleList(int initialCapacity) { if (initialCapacity > 0) { this.doubles = new double[initialCapacity]; } else if (initialCapacity == 0) { this.doubles = EMPTY_ELEMENT_DATA; } else { throw new IllegalArgumentException("Illegal Capacity: " + initialCapacity); } } /** * Constructs an empty list with an initial capacity of ten. */ public DoubleList() { this.doubles = DEFAULT_CAPACITY_EMPTY_ELEMENT_DATA; } /** * Constructs a double list from the given values by simple assigning them. * * @param longs the values of the double list. * @param size the index of the last value in the array. */ @SuppressWarnings("all") public DoubleList(double[] longs, int size) { if (longs == null) { throw new IllegalArgumentException("Illegal initial array 'null'"); } if (size < 0) { throw new IllegalArgumentException("Size if negative."); } this.doubles = longs; this.size = size; } /** * Returns the number of elements in this list. * * @return the number of elements in this list */ public int size() { return size; } /** * Returns <tt>true</tt> if this list contains no elements. * * @return <tt>true</tt> if this list contains no elements */ public boolean isEmpty() { return size == 0; } /** * Returns <tt>true</tt> if this list contains the specified element. * More formally, returns <tt>true</tt> if and only if this list contains * at least one element <tt>e</tt> such that * <tt>(o==null&nbsp;?&nbsp;e==null&nbsp;:&nbsp;o.equals(e))</tt>. * * @param o element whose presence in this list is to be tested * @return <tt>true</tt> if this list contains the specified element */ public boolean contains(double o) { return indexOf(o) >= 0; } /** * Returns the index of the first occurrence of the specified element * in this list, or -1 if this list does not contain the element. 
* More formally, returns the lowest index <tt>i</tt> such that * <tt>(o==null&nbsp;?&nbsp;get(i)==null&nbsp;:&nbsp;o.equals(get(i)))</tt>, * or -1 if there is no such index. * * @param o the double value * @return the index of the given double element */ public int indexOf(double o) { for (int i = 0; i < size; i++) { if (o == doubles[i]) { return i; } } return -1; } /** * Returns the index of the last occurrence of the specified element * in this list, or -1 if this list does not contain the element. * More formally, returns the highest index <tt>i</tt> such that * <tt>(o==null&nbsp;?&nbsp;get(i)==null&nbsp;:&nbsp;o.equals(get(i)))</tt>, * or -1 if there is no such index. * * @param o the double value * @return the last index of the given double element */ public int lastIndexOf(double o) { for (int i = size - 1; i >= 0; i--) { if (o == doubles[i]) { return i; } } return -1; } /** * Returns a shallow copy of this <tt>LongList</tt> instance. (The * elements themselves are not copied.) *<|fim▁hole|> v.doubles = Arrays.copyOf(doubles, size); v.size = size; return v; } /** * Returns an array containing all of the elements in this list * in proper sequence (from first to last element). * <p> * <p>The returned array will be "safe" in that no references to it are * maintained by this list. (In other words, this method must allocate * a new array). The caller is thus free to modify the returned array. * <p> * <p>This method acts as bridge between array-based and collection-based * APIs. * * @return an array containing all of the elements in this list in * proper sequence */ public double[] toArray() { return Arrays.copyOf(doubles, size); } private void growIfNeeded(int newCapacity) { if (newCapacity != -1) { doubles = Arrays.copyOf(doubles, newCapacity); } } /** * Returns the element at the specified position in this list. 
* * @param index index of the element to return * @return the element at the specified position in this list * @throws IndexOutOfBoundsException */ public double get(int index) { rangeCheck(index, size); return doubles[index]; } /** * Replaces the element at the specified position in this list with * the specified element. * * @param index index of the element to replace * @param element element to be stored at the specified position * @return the element previously at the specified position * @throws IndexOutOfBoundsException */ public double set(int index, double element) { rangeCheck(index, size); double oldValue = doubles[index]; doubles[index] = element; return oldValue; } /** * Appends the specified element to the end of this list. * * @param e element to be appended to this list * @return <tt>true</tt> (as specified by Collection#add) */ public boolean add(double e) { int newCapacity = calculateNewCapacity(doubles.length, size + 1); growIfNeeded(newCapacity); doubles[size++] = e; return true; } /** * Inserts the specified element at the specified position in this * list. Shifts the element currently at that position (if any) and * any subsequent elements to the right (adds one to their indices). * * @param index index at which the specified element is to be inserted * @param element element to be inserted * @throws IndexOutOfBoundsException */ public void add(int index, double element) { rangeCheckForAdd(index, size); int newCapacity = calculateNewCapacity(doubles.length, size + 1); growIfNeeded(newCapacity); System.arraycopy(doubles, index, doubles, index + 1, size - index); doubles[index] = element; size++; } /** * Removes the element at the specified position in this list. * Shifts any subsequent elements to the left (subtracts one from their * indices). 
* * @param index the index of the element to be removed * @return the element that was removed from the list * @throws IndexOutOfBoundsException */ public double remove(int index) { rangeCheck(index, size); double oldValue = doubles[index]; int numMoved = size - index - 1; if (numMoved > 0) { System.arraycopy(doubles, index + 1, doubles, index, numMoved); } --size; return oldValue; } /** * Removes the first occurrence of the specified element from this list, * if it is present. If the list does not contain the element, it is * unchanged. More formally, removes the element with the lowest index * <tt>i</tt> such that * <tt>(o==null&nbsp;?&nbsp;get(i)==null&nbsp;:&nbsp;o.equals(get(i)))</tt> * (if such an element exists). Returns <tt>true</tt> if this list * contained the specified element (or equivalently, if this list * changed as a result of the call). * * @param o element to be removed from this list, if present * @return <tt>true</tt> if this list contained the specified element */ public boolean remove(double o) { for (int index = 0; index < size; index++) { if (o == doubles[index]) { fastRemove(index); return true; } } return false; } private void fastRemove(int index) { int numMoved = size - index - 1; if (numMoved > 0) { System.arraycopy(doubles, index + 1, doubles, index, numMoved); } --size; } /** * Removes all of the elements from this list. The list will * be empty after this call returns. */ public void clear() { doubles = DEFAULT_CAPACITY_EMPTY_ELEMENT_DATA; size = 0; } /** * Appends all of the elements in the specified collection to the end of * this list, in the order that they are returned by the * specified collection's Iterator. The behavior of this operation is * undefined if the specified collection is modified while the operation * is in progress. (This implies that the behavior of this call is * undefined if the specified collection is this list, and this * list is nonempty.) 
* * @param c collection containing elements to be added to this list * @return <tt>true</tt> if this list changed as a result of the call * @throws NullPointerException if the specified collection is null */ public boolean addAll(DoubleList c) { double[] a = c.toArray(); int numNew = a.length; int newCapacity = calculateNewCapacity(doubles.length, size + numNew); growIfNeeded(newCapacity); System.arraycopy(a, 0, doubles, size, numNew); size += numNew; return numNew != 0; } /** * Appends the long[] at the end of this long list. * * @param otherDoubles the other double[] that is appended * @return <tt>true</tt> if this list changed as a result of the call * @throws NullPointerException if the specified array is null */ public boolean addAll(double[] otherDoubles) { int numNew = otherDoubles.length; int newCapacity = calculateNewCapacity(doubles.length, size + numNew); growIfNeeded(newCapacity); System.arraycopy(otherDoubles, 0, doubles, size, numNew); size += numNew; return numNew != 0; } /** * Inserts all of the elements in the specified collection into this * list, starting at the specified position. Shifts the element * currently at that position (if any) and any subsequent elements to * the right (increases their indices). The new elements will appear * in the list in the order that they are returned by the * specified collection's iterator. 
* * @param index index at which to insert the first element from the * specified collection * @param c collection containing elements to be added to this list * @return <tt>true</tt> if this list changed as a result of the call * @throws IndexOutOfBoundsException * @throws NullPointerException if the specified collection is null */ public boolean addAll(int index, DoubleList c) { rangeCheckForAdd(index, size); double[] a = c.toArray(); int numNew = a.length; int newCapacity = calculateNewCapacity(doubles.length, size + numNew); growIfNeeded(newCapacity); int numMoved = size - index; if (numMoved > 0) { System.arraycopy(doubles, index, doubles, index + numNew, numMoved); } System.arraycopy(a, 0, doubles, index, numNew); size += numNew; return numNew != 0; } /** * Removes from this list all of the elements whose index is between * {@code fromIndex}, inclusive, and {@code toIndex}, exclusive. * Shifts any succeeding elements to the left (reduces their index). * This call shortens the list by {@code (toIndex - fromIndex)} elements. * (If {@code toIndex==fromIndex}, this operation has no effect.) * * @throws IndexOutOfBoundsException if {@code fromIndex} or * {@code toIndex} is out of range * ({@code fromIndex < 0 || * fromIndex >= size() || * toIndex > size() || * toIndex < fromIndex}) */ public void removeRange(int fromIndex, int toIndex) { int numMoved = size - toIndex; System.arraycopy(doubles, toIndex, doubles, fromIndex, numMoved); size = size - (toIndex - fromIndex); } /** * Trims the capacity of this <tt>ArrayList</tt> instance to be the * list's current size. An application can use this operation to minimize * the storage of an <tt>ArrayList</tt> instance. */ private double[] trimToSize(int size, double[] elements) { double[] copy = Arrays.copyOf(elements, elements.length); if (size < elements.length) { copy = (size == 0) ? 
EMPTY_ELEMENT_DATA : Arrays.copyOf(elements, size); } return copy; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (obj == this) { return true; } if (obj.getClass() != getClass()) { return false; } DoubleList rhs = (DoubleList) obj; double[] thisTrimmed = trimToSize(this.size, this.doubles); double[] otherTrimmed = trimToSize(rhs.size, rhs.doubles); return new EqualsBuilder() .append(thisTrimmed, otherTrimmed) .append(this.size, rhs.size) .isEquals(); } @Override public int hashCode() { return new HashCodeBuilder() .append(doubles) .append(size) .toHashCode(); } @Override public String toString() { return new ToStringBuilder(this) .append("doubles", trimToSize(this.size, doubles)) .append("size", size) .toString(); } /** * @return maximum of the values of the list */ public double max() { if (size <= 0) { return Double.NaN; } double max = Double.MIN_VALUE; for (int i = 0; i < size; i++) { max = doubles[i] > max ? doubles[i] : max; } return max; } /** * @return minimum of the values of the list */ public double min() { if (size <= 0) { return Double.NaN; } double min = Double.MAX_VALUE; for (int i = 0; i < size; i++) { min = doubles[i] < min ? 
doubles[i] : min; } return min; } /** * @return average of the values of the list */ public double avg() { if (size <= 0) { return Double.NaN; } double current = 0; for (int i = 0; i < size; i++) { current += doubles[i]; } return current / size; } /** * @param scale to be applied to the values of this list * @return a new instance scaled with the given parameter */ public DoubleList scale(double scale) { DoubleList scaled = new DoubleList(size); for (int i = 0; i < size; i++) { scaled.add(doubles[i] * scale); } return scaled; } /** * Calculates the standard deviation * * @return the standard deviation */ public double stdDeviation() { if (isEmpty()) { return Double.NaN; } return Math.sqrt(variance()); } private double mean() { double sum = 0.0; for (int i = 0; i < size(); i++) { sum = sum + get(i); } return sum / size(); } private double variance() { double avg = mean(); double sum = 0.0; for (int i = 0; i < size(); i++) { double value = get(i); sum += (value - avg) * (value - avg); } return sum / (size() - 1); } /** * Implemented the quantile type 7 referred to * http://tolstoy.newcastle.edu.au/R/e17/help/att-1067/Quartiles_in_R.pdf * and * http://stat.ethz.ch/R-manual/R-patched/library/stats/html/quantile.html * as its the default quantile implementation * <p> * <code> * QuantileType7 = function (v, p) { * v = sort(v) * h = ((length(v)-1)*p)+1 * v[floor(h)]+((h-floor(h))*(v[floor(h)+1]- v[floor(h)])) * } * </code> * * @param percentile - the percentile (0 - 1), e.g. 0.25 * @return the value of the n-th percentile */ public double percentile(double percentile) { double[] copy = toArray(); Arrays.sort(copy);// Attention: this is only necessary because this list is not restricted to non-descending values return evaluateForDoubles(copy, percentile); } private static double evaluateForDoubles(double[] points, double percentile) { //For example: //values = [1,2,2,3,3,3,4,5,6], size = 9, percentile (e.g. 
0.25) // size - 1 = 8 * 0.25 = 2 (~ 25% from 9) + 1 = 3 => values[3] => 2 double percentileIndex = ((points.length - 1) * percentile) + 1; double rawMedian = points[floor(percentileIndex - 1)]; double weight = percentileIndex - floor(percentileIndex); if (weight > 0) { double pointDistance = points[floor(percentileIndex - 1) + 1] - points[floor(percentileIndex - 1)]; return rawMedian + weight * pointDistance; } else { return rawMedian; } } /** * Wraps the Math.floor function and casts it to an integer * * @param value - the evaluatedValue * @return the floored evaluatedValue */ private static int floor(double value) { return (int) Math.floor(value); } }<|fim▁end|>
* @return a clone of this <tt>LongList</tt> instance */ public DoubleList copy() { DoubleList v = new DoubleList(size);
<|file_name|>client.go<|end_file_name|><|fim▁begin|>// Copyright 2017 Istio Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package kube implements the shared and reusable library for Kubernetes package kube import ( "fmt" "os" "os/user" multierror "github.com/hashicorp/go-multierror" "k8s.io/client-go/kubernetes" "k8s.io/client-go/tools/clientcmd" clientcmdapi "k8s.io/client-go/tools/clientcmd/api" "istio.io/istio/pkg/log" // import GKE cluster authentication plugin _ "k8s.io/client-go/plugin/pkg/client/auth/gcp" // import OIDC cluster authentication plugin, e.g. 
for Tectonic _ "k8s.io/client-go/plugin/pkg/client/auth/oidc" ) // ResolveConfig checks whether to use the in-cluster or out-of-cluster config func ResolveConfig(kubeconfig string) (string, error) { // Consistency with kubectl if kubeconfig == "" { kubeconfig = os.Getenv("KUBECONFIG") } if kubeconfig == "" { usr, err := user.Current() if err == nil { defaultCfg := usr.HomeDir + "/.kube/config"<|fim▁hole|> } } } if kubeconfig != "" { info, err := os.Stat(kubeconfig) if err != nil { if os.IsNotExist(err) { err = fmt.Errorf("kubernetes configuration file %q does not exist", kubeconfig) } else { err = multierror.Append(err, fmt.Errorf("kubernetes configuration file %q", kubeconfig)) } return "", err } // if it's an empty file, switch to in-cluster config if info.Size() == 0 { log.Info("using in-cluster configuration") return "", nil } } return kubeconfig, nil } // CreateInterface is a helper function to create Kubernetes interface from kubeconfig file func CreateInterface(kubeconfig string) (kubernetes.Interface, error) { restConfig, err := clientcmd.BuildConfigFromFlags("", kubeconfig) if err != nil { return nil, err } return kubernetes.NewForConfig(restConfig) } // CreateInterfaceFromClusterConfig is a helper function to create Kubernetes interface from in memory cluster config struct func CreateInterfaceFromClusterConfig(clusterConfig *clientcmdapi.Config) (kubernetes.Interface, error) { return createInterface(clusterConfig) } // createInterface is new function which creates rest config and kubernetes interface // from passed cluster's config struct func createInterface(clusterConfig *clientcmdapi.Config) (kubernetes.Interface, error) { clientConfig := clientcmd.NewDefaultClientConfig(*clusterConfig, &clientcmd.ConfigOverrides{}) rest, err := clientConfig.ClientConfig() if err != nil { return nil, err } return kubernetes.NewForConfig(rest) }<|fim▁end|>
_, err := os.Stat(kubeconfig) if err != nil { kubeconfig = defaultCfg
<|file_name|>qgsvertextool.cpp<|end_file_name|><|fim▁begin|>/*************************************************************************** qgsvertextool.cpp -------------------------------------- Date : February 2017 Copyright : (C) 2017 by Martin Dobias Email : wonder dot sk at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ #include "qgsvertextool.h" #include "qgsadvanceddigitizingdockwidget.h" #include "qgscurve.h" #include "qgscurvepolygon.h" #include "qgsgeometryutils.h" #include "qgsgeometryvalidator.h" #include "qgslogger.h" #include "qgsmapcanvas.h" #include "qgsmulticurve.h" #include "qgsmultipoint.h" #include "qgspointlocator.h" #include "qgsproject.h" #include "qgsrubberband.h" #include "qgssettings.h" #include "qgssnapindicator.h" #include "qgssnappingutils.h" #include "qgsvectorlayer.h" #include "qgsvertexmarker.h" #include "qgsstatusbar.h" #include "qgisapp.h" #include "qgsselectedfeature.h" #include "qgsvertexeditor.h" #include "qgsvertexentry.h" #include <QMenu> #include <QRubberBand> uint qHash( const Vertex &v ) { return qHash( v.layer ) ^ qHash( v.fid ) ^ qHash( v.vertexId ); } // // geomutils - may get moved elsewhere // //! 
Find out whether vertex at the given index is an endpoint (assuming linear geometry) static bool isEndpointAtVertexIndex( const QgsGeometry &geom, int vertexIndex ) { const QgsAbstractGeometry *g = geom.constGet(); if ( const QgsCurve *curve = qgsgeometry_cast< const QgsCurve *>( g ) ) { return vertexIndex == 0 || vertexIndex == curve->numPoints() - 1; } else if ( const QgsMultiCurve *multiCurve = qgsgeometry_cast<const QgsMultiCurve *>( g ) ) { for ( int i = 0; i < multiCurve->numGeometries(); ++i ) { QgsCurve *part = qgsgeometry_cast<QgsCurve *>( multiCurve->geometryN( i ) ); Q_ASSERT( part ); if ( vertexIndex < part->numPoints() ) return vertexIndex == 0 || vertexIndex == part->numPoints() - 1; vertexIndex -= part->numPoints(); } Q_ASSERT( false ); // should not get here return false; } else { QgsDebugMsg( "is_endpoint_at_vertex_index: unexpected geometry type!" ); return false; } } //! Returns the index of vertex adjacent to the given endpoint. Assuming linear geometries. int adjacentVertexIndexToEndpoint( const QgsGeometry &geom, int vertexIndex ) { const QgsAbstractGeometry *g = geom.constGet(); if ( const QgsCurve *curve = qgsgeometry_cast<const QgsCurve *>( g ) ) { return vertexIndex == 0 ? 1 : curve->numPoints() - 2; } else if ( const QgsMultiCurve *multiCurve = qgsgeometry_cast<const QgsMultiCurve *>( g ) ) { int offset = 0; for ( int i = 0; i < multiCurve->numGeometries(); ++i ) { const QgsCurve *part = qgsgeometry_cast<const QgsCurve *>( multiCurve->geometryN( i ) ); Q_ASSERT( part ); if ( vertexIndex < part->numPoints() ) return vertexIndex == 0 ? offset + 1 : offset + part->numPoints() - 2; vertexIndex -= part->numPoints(); offset += part->numPoints(); } } else { QgsDebugMsg( "adjacent_vertex_index_to_endpoint: unexpected geometry type!" 
); } return -1; } /** * Determine whether a vertex is in the middle of a circular edge or not * (wrapper for slightly awkward API) */ static bool isCircularVertex( const QgsGeometry &geom, int vertexIndex ) { QgsVertexId vid; return geom.vertexIdFromVertexNr( vertexIndex, vid ) && vid.type == QgsVertexId::CurveVertex; } //! Create a multi-point geometry that can be used to highlight vertices of a feature static QgsGeometry geometryToMultiPoint( const QgsGeometry &geom ) { QgsMultiPoint *multiPoint = new QgsMultiPoint(); QgsGeometry outputGeom( multiPoint ); for ( auto pointIt = geom.vertices_begin(); pointIt != geom.vertices_end(); ++pointIt ) multiPoint->addGeometry( ( *pointIt ).clone() ); return outputGeom; } // // snapping match filters // //! a filter to allow just one particular feature class OneFeatureFilter : public QgsPointLocator::MatchFilter { public: OneFeatureFilter( const QgsVectorLayer *layer, QgsFeatureId fid ) : layer( layer ) , fid( fid ) {} bool acceptMatch( const QgsPointLocator::Match &match ) override { return match.layer() == layer && match.featureId() == fid; } private: const QgsVectorLayer *layer = nullptr; QgsFeatureId fid; }; //! a filter just to gather all matches at the same place class MatchCollectingFilter : public QgsPointLocator::MatchFilter { public: QList<QgsPointLocator::Match> matches; QgsVertexTool *vertextool = nullptr; MatchCollectingFilter( QgsVertexTool *vertextool ) : vertextool( vertextool ) {} bool acceptMatch( const QgsPointLocator::Match &match ) override { if ( match.distance() > 0 ) return false; matches.append( match ); // there may be multiple points at the same location, but we get only one // result... 
the locator API needs a new method verticesInRect() QgsGeometry matchGeom = vertextool->cachedGeometry( match.layer(), match.featureId() ); QgsVertexId vid; QgsPoint pt; while ( matchGeom.constGet()->nextVertex( vid, pt ) ) { int vindex = matchGeom.vertexNrFromVertexId( vid ); if ( pt.x() == match.point().x() && pt.y() == match.point().y() && vindex != match.vertexIndex() ) { QgsPointLocator::Match extra_match( match.type(), match.layer(), match.featureId(), 0, match.point(), vindex ); matches.append( extra_match ); } } return true; } }; /** * Keeps the best match from a selected feature so that we can possibly use it with higher priority. * If we do not encounter any selected feature within tolerance, we use the best match as usual. */ class SelectedMatchFilter : public QgsPointLocator::MatchFilter { public: explicit SelectedMatchFilter( double tol ) : mTolerance( tol ) {} bool acceptMatch( const QgsPointLocator::Match &match ) override { if ( match.distance() <= mTolerance && match.layer() && match.layer()->selectedFeatureIds().contains( match.featureId() ) ) { if ( !mBestSelectedMatch.isValid() || match.distance() < mBestSelectedMatch.distance() ) mBestSelectedMatch = match; } return true; } bool hasSelectedMatch() const { return mBestSelectedMatch.isValid(); } QgsPointLocator::Match bestSelectedMatch() const { return mBestSelectedMatch; } private: double mTolerance; QgsPointLocator::Match mBestSelectedMatch; }; // // // QgsVertexTool::QgsVertexTool( QgsMapCanvas *canvas, QgsAdvancedDigitizingDockWidget *cadDock, VertexToolMode mode ) : QgsMapToolAdvancedDigitizing( canvas, cadDock ) , mMode( mode ) { setAdvancedDigitizingAllowed( false ); mSnapIndicator.reset( new QgsSnapIndicator( canvas ) ); mEdgeCenterMarker = new QgsVertexMarker( canvas ); mEdgeCenterMarker->setIconType( QgsVertexMarker::ICON_CROSS ); mEdgeCenterMarker->setColor( Qt::red ); mEdgeCenterMarker->setPenWidth( 3 ); mEdgeCenterMarker->setVisible( false ); mFeatureBand = createRubberBand( 
QgsWkbTypes::LineGeometry );
  mFeatureBand->setVisible( false );

  QColor color = digitizingStrokeColor();

  // circles at each vertex of the highlighted feature
  mFeatureBandMarkers = new QgsRubberBand( canvas );
  mFeatureBandMarkers->setIcon( QgsRubberBand::ICON_CIRCLE );
  mFeatureBandMarkers->setColor( color );
  mFeatureBandMarkers->setIconSize( 8 );
  mFeatureBandMarkers->setVisible( false );

  // marker for the single vertex under the mouse
  mVertexBand = new QgsRubberBand( canvas );
  mVertexBand->setIcon( QgsRubberBand::ICON_CIRCLE );
  mVertexBand->setColor( color );
  mVertexBand->setIconSize( 15 );
  mVertexBand->setVisible( false );

  // semi-transparent band for the edge under the mouse
  QColor color2( color );
  color2.setAlpha( color2.alpha() / 3 );
  mEdgeBand = new QgsRubberBand( canvas );
  mEdgeBand->setColor( color2 );
  mEdgeBand->setWidth( 10 );
  mEdgeBand->setVisible( false );

  // marker shown next to a line endpoint (clicking it extends the line)
  mEndpointMarker = new QgsVertexMarker( canvas );
  mEndpointMarker->setIconType( QgsVertexMarker::ICON_CROSS );
  mEndpointMarker->setColor( Qt::red );
  mEndpointMarker->setPenWidth( 3 );
  mEndpointMarker->setVisible( false );
}

//! Destructor: canvas items are not parented to the canvas, so delete them explicitly
QgsVertexTool::~QgsVertexTool()
{
  delete mEdgeCenterMarker;
  delete mFeatureBand;
  delete mFeatureBandMarkers;
  delete mVertexBand;
  delete mEdgeBand;
  delete mEndpointMarker;
}

//! Reset all transient state (highlight, rubber bands, editor, validations) when the tool is deactivated
void QgsVertexTool::deactivate()
{
  setHighlightedVertices( QList<Vertex>() );
  removeTemporaryRubberBands();
  cleanupVertexEditor();

  mSnapIndicator->setMatch( QgsPointLocator::Match() );

  QHash< QPair<QgsVectorLayer *, QgsFeatureId>, GeometryValidation>::iterator it = mValidations.begin();
  for ( ; it != mValidations.end(); ++it )
    it->cleanup();
  mValidations.clear();

  QgsMapToolAdvancedDigitizing::deactivate();
}

//! Convenience overload: straight band in map CRS with only the second point moving
void QgsVertexTool::addDragBand( const QgsPointXY &v1, const QgsPointXY &v2 )
{
  addDragStraightBand( nullptr, v1, v2, false, true, v2 );
}

//! Create a two-point rubber band for vertex dragging; offsets are remembered relative to mapPoint
void QgsVertexTool::addDragStraightBand( QgsVectorLayer *layer, QgsPointXY v0, QgsPointXY v1, bool moving0, bool moving1, const QgsPointXY &mapPoint )
{
  // if layer is not null, the input coordinates are coming in the layer's CRS rather than map CRS
  if ( layer )
  {
    v0 = toMapCoordinates( layer, v0 );
    v1 = toMapCoordinates( layer, v1 );
  }

  StraightBand b;
  b.band = createRubberBand( QgsWkbTypes::LineGeometry, true );
  b.p0 = v0;
  b.p1 = v1;
  b.moving0 = moving0;
  b.moving1 = moving1;
  b.offset0 = v0 - mapPoint;
  b.offset1 = v1 - mapPoint;
  b.band->addPoint( v0 );
  b.band->addPoint( v1 );
  mDragStraightBands << b;
}

//! Create a three-point rubber band representing a circular (arc) segment being dragged
void QgsVertexTool::addDragCircularBand( QgsVectorLayer *layer, QgsPointXY v0, QgsPointXY v1, QgsPointXY v2, bool moving0, bool moving1, bool moving2, const QgsPointXY &mapPoint )
{
  // if layer is not null, the input coordinates are coming in the layer's CRS rather than map CRS
  if ( layer )
  {
    v0 = toMapCoordinates( layer, v0 );
    v1 = toMapCoordinates( layer, v1 );
    v2 = toMapCoordinates( layer, v2 );
  }

  CircularBand b;
  b.band = createRubberBand( QgsWkbTypes::LineGeometry, true );
  b.p0 = v0;
  b.p1 = v1;
  b.p2 = v2;
  b.moving0 = moving0;
  b.moving1 = moving1;
  b.moving2 = moving2;
  b.offset0 = v0 - mapPoint;
  b.offset1 = v1 - mapPoint;
  b.offset2 = v2 - mapPoint;
  b.updateRubberBand( mapPoint );
  mDragCircularBands << b;
}

//! Delete all drag-related rubber bands and point markers
void QgsVertexTool::clearDragBands()
{
  qDeleteAll( mDragPointMarkers );
  mDragPointMarkers.clear();
  mDragPointMarkersOffset.clear();

  for ( const StraightBand &b : qgis::as_const( mDragStraightBands ) )
    delete b.band;
  mDragStraightBands.clear();

  for ( const CircularBand &b : qgis::as_const( mDragCircularBands ) )
    delete b.band;
  mDragCircularBands.clear();
}

//! Mouse press: update highlight selection, start rect selection, or show the context menu
void QgsVertexTool::cadCanvasPressEvent( QgsMapMouseEvent *e )
{
  if ( mSelectionMethod == SelectionRange )
  {
    rangeMethodPressEvent( e );
    return;
  }

  if ( !mDraggingVertex && !mSelectedVertices.isEmpty() && !( e->modifiers() & Qt::ShiftModifier ) && !( e->modifiers() & Qt::ControlModifier ) )
  {
    // only remove highlight if not clicked on one of highlighted vertices
    bool clickedOnHighlightedVertex = false;
    QgsPointLocator::Match m = snapToEditableLayer( e );
    if ( m.hasVertex() )
    {
      for ( const Vertex &selectedVertex : qgis::as_const( mSelectedVertices ) )
      {
        if ( selectedVertex.layer == m.layer() && selectedVertex.fid == m.featureId()
&& selectedVertex.vertexId == m.vertexIndex() )
        {
          clickedOnHighlightedVertex = true;
          break;
        }
      }
    }

    if ( !clickedOnHighlightedVertex )
      setHighlightedVertices( QList<Vertex>() );  // reset selection
  }

  if ( e->button() == Qt::LeftButton )
  {
    if ( e->modifiers() & Qt::ControlModifier || e->modifiers() & Qt::ShiftModifier )
    {
      // shift or ctrl-click vertices to highlight without entering edit mode
      QgsPointLocator::Match m = snapToEditableLayer( e );
      if ( m.hasVertex() )
      {
        Vertex vertex( m.layer(), m.featureId(), m.vertexIndex() );
        HighlightMode mode = ModeReset;
        if ( e->modifiers() & Qt::ShiftModifier )
        {
          // Shift+Click to add vertex to highlight
          mode = ModeAdd;
        }
        else if ( e->modifiers() & Qt::ControlModifier )
        {
          // Ctrl+Click to remove vertex
          mode = ModeSubtract;
        }
        setHighlightedVertices( QList<Vertex>() << vertex, mode );
        return;
      }
    }

    // the user may have started dragging a rect to select vertices
    if ( !mDraggingVertex && !mDraggingEdge )
      mSelectionRectStartPos.reset( new QPoint( e->pos() ) );
  }

  if ( e->button() == Qt::RightButton )
  {
    if ( !mSelectionRect && !mDraggingVertex && !mDraggingEdge )
    {
      // show popup menu - if we are on top of a feature
      if ( mLastMouseMoveMatch.isValid() && mLastMouseMoveMatch.layer() )
      {
        QMenu menu;
        QAction *actionVertexEditor = menu.addAction( tr( "Vertex Editor" ) );
        connect( actionVertexEditor, &QAction::triggered, this, &QgsVertexTool::showVertexEditor );  //#spellok
        menu.exec( mCanvas->mapToGlobal( e->pos() ) );
      }
    }
  }
}

//! Mouse release: finish double-click vertex add, rect selection, or an ongoing drag
void QgsVertexTool::cadCanvasReleaseEvent( QgsMapMouseEvent *e )
{
  if ( mSelectionMethod == SelectionRange )
  {
    rangeMethodReleaseEvent( e );
    return;
  }

  if ( mNewVertexFromDoubleClick )
  {
    QgsPointLocator::Match m( *mNewVertexFromDoubleClick );
    mNewVertexFromDoubleClick.reset();

    // dragging of edges and double clicking on edges to add vertex are slightly overlapping
    // so we need to cancel edge moving before we start dragging new vertex
    stopDragging();
    startDraggingAddVertex( m );
  }
  else if ( mSelectionRect )
  {
    // only handling of selection rect being dragged
    QgsPointXY pt0 = toMapCoordinates( *mSelectionRectStartPos );
    QgsPointXY pt1 = toMapCoordinates( e->pos() );
    QgsRectangle map_rect( pt0, pt1 );
    QList<Vertex> vertices;
    QList<Vertex> selectedVertices;

    // for each editable layer, select vertices
    const auto layers = canvas()->layers();
    for ( QgsMapLayer *layer : layers )
    {
      QgsVectorLayer *vlayer = qobject_cast<QgsVectorLayer *>( layer );
      if ( !vlayer || !vlayer->isEditable() || !vlayer->isSpatial() )
        continue;
      if ( mMode == ActiveLayer && vlayer != currentVectorLayer() )
        continue;

      QgsRectangle layerRect = toLayerCoordinates( vlayer, map_rect );
      QgsFeature f;
      // geometry only - attributes are not needed for vertex selection
      QgsFeatureIterator fi = vlayer->getFeatures( QgsFeatureRequest( layerRect ).setSubsetOfAttributes( QgsAttributeList() ) );
      while ( fi.nextFeature( f ) )
      {
        bool isFeatureSelected = vlayer->selectedFeatureIds().contains( f.id() );
        QgsGeometry g = f.geometry();
        for ( int i = 0; i < g.constGet()->nCoordinates(); ++i )
        {
          QgsPointXY pt = g.vertexAt( i );
          if ( layerRect.contains( pt ) )
          {
            vertices << Vertex( vlayer, f.id(), i );
            if ( isFeatureSelected )
              selectedVertices << Vertex( vlayer, f.id(), i );
          }
        }
      }
    }

    // If there were any vertices that come from selected features, use just vertices from selected features.
    // This allows user to select a bunch of features in complex situations to constrain the selection.
if ( !selectedVertices.isEmpty() ) vertices = selectedVertices; HighlightMode mode = ModeReset; if ( e->modifiers() & Qt::ShiftModifier ) mode = ModeAdd; else if ( e->modifiers() & Qt::ControlModifier ) mode = ModeSubtract; setHighlightedVertices( vertices, mode ); stopSelectionRect(); } else // selection rect is not being dragged { if ( e->button() == Qt::LeftButton && !( e->modifiers() & Qt::ShiftModifier ) && !( e->modifiers() & Qt::ControlModifier ) ) { // accepting action if ( mDraggingVertex ) { QgsPointLocator::Match match = e->mapPointMatch(); moveVertex( e->mapPoint(), &match ); } else if ( mDraggingEdge ) { // do not use e.mapPoint() as it may be snapped moveEdge( toMapCoordinates( e->pos() ) ); } else { startDragging( e ); } } else if ( e->button() == Qt::RightButton ) { // cancel action stopDragging(); } } mSelectionRectStartPos.reset(); } void QgsVertexTool::cadCanvasMoveEvent( QgsMapMouseEvent *e ) { if ( mSelectionMethod == SelectionRange ) { rangeMethodMoveEvent( e ); return; } if ( mDraggingVertex ) { mouseMoveDraggingVertex( e ); } else if ( mDraggingEdge ) { mouseMoveDraggingEdge( e ); } else if ( mSelectionRectStartPos ) { // the user may be dragging a rect to select vertices if ( !mSelectionRect && ( e->pos() - *mSelectionRectStartPos ).manhattanLength() >= 10 ) { startSelectionRect( *mSelectionRectStartPos ); } if ( mSelectionRect ) { updateSelectionRect( e->pos() ); } } else { mouseMoveNotDragging( e ); } } void QgsVertexTool::mouseMoveDraggingVertex( QgsMapMouseEvent *e ) { mSnapIndicator->setMatch( e->mapPointMatch() ); mEdgeCenterMarker->setVisible( false ); moveDragBands( e->mapPoint() ); } void QgsVertexTool::moveDragBands( const QgsPointXY &mapPoint ) { for ( int i = 0; i < mDragStraightBands.count(); ++i ) { StraightBand &b = mDragStraightBands[i]; if ( b.moving0 ) b.band->movePoint( 0, mapPoint + b.offset0 ); if ( b.moving1 ) b.band->movePoint( 1, mapPoint + b.offset1 ); } for ( int i = 0; i < mDragCircularBands.count(); ++i ) { 
CircularBand &b = mDragCircularBands[i];
    b.updateRubberBand( mapPoint );
  }

  // in case of moving of standalone point geometry
  for ( int i = 0; i < mDragPointMarkers.count(); ++i )
  {
    QgsVertexMarker *marker = mDragPointMarkers[i];
    QgsVector offset = mDragPointMarkersOffset[i];
    marker->setCenter( mapPoint + offset );
  }

  // make sure the temporary feature rubber band is not visible
  removeTemporaryRubberBands();
}

//! While dragging an edge: move the drag rubber bands (no snapping for edges)
void QgsVertexTool::mouseMoveDraggingEdge( QgsMapMouseEvent *e )
{
  mSnapIndicator->setMatch( QgsPointLocator::Match() );
  mEdgeCenterMarker->setVisible( false );

  QgsPointXY mapPoint = toMapCoordinates( e->pos() );  // do not use e.mapPoint() as it may be snapped

  moveDragBands( mapPoint );
}

//! Double-click on an edge: remember the match so a new vertex is added on release
void QgsVertexTool::canvasDoubleClickEvent( QgsMapMouseEvent *e )
{
  QgsPointLocator::Match m = snapToEditableLayer( e );
  if ( !m.hasEdge() )
    return;

  mNewVertexFromDoubleClick.reset( new QgsPointLocator::Match( m ) );
}

//! Hide all hover-feedback canvas items and forget the highlighted feature
void QgsVertexTool::removeTemporaryRubberBands()
{
  mFeatureBand->setVisible( false );
  mFeatureBandMarkers->setVisible( false );
  mFeatureBandLayer = nullptr;
  mFeatureBandFid = QgsFeatureId();
  mVertexBand->setVisible( false );
  mEdgeBand->setVisible( false );
  mEndpointMarkerCenter.reset();
  mEndpointMarker->setVisible( false );
}

/**
 * Snap to editable layers using a temporary snapping configuration
 * (vertex+segment, ignoring the project's user configuration), giving priority
 * to the current layer, to selected features and to the previously snapped feature.
 * The original snapping config is restored before returning.
 */
QgsPointLocator::Match QgsVertexTool::snapToEditableLayer( QgsMapMouseEvent *e )
{
  QgsSnappingUtils *snapUtils = canvas()->snappingUtils();
  QgsSnappingConfig oldConfig = snapUtils->config();
  QgsPointLocator::Match m;

  QgsPointXY mapPoint = toMapCoordinates( e->pos() );
  double tol = QgsTolerance::vertexSearchRadius( canvas()->mapSettings() );

  QgsSnappingConfig config( QgsProject::instance() );
  config.setEnabled( true );
  config.setMode( QgsSnappingConfig::AdvancedConfiguration );
  config.setIntersectionSnapping( false );  // only snap to layers

  // if there is a current layer, it should have priority over other layers
  // because sometimes there may be match from multiple layers at one location
  // and selecting current layer is an easy way for the user to prioritize a layer
  if ( QgsVectorLayer *currentVlayer = currentVectorLayer() )
  {
    if ( currentVlayer->isEditable() )
    {
      const auto layers = canvas()->layers();
      for ( QgsMapLayer *layer : layers )
      {
        QgsVectorLayer *vlayer = qobject_cast<QgsVectorLayer *>( layer );
        if ( !vlayer )
          continue;

        // enable snapping only for the current layer in this first pass
        config.setIndividualLayerSettings( vlayer, QgsSnappingConfig::IndividualLayerSettings(
                                             vlayer == currentVlayer, QgsSnappingConfig::VertexAndSegment, tol, QgsTolerance::ProjectUnits ) );
      }

      snapUtils->setConfig( config );
      SelectedMatchFilter filter( tol );
      m = snapUtils->snapToMap( mapPoint, &filter );

      // we give priority to snap matches that are from selected features
      if ( filter.hasSelectedMatch() )
      {
        m = filter.bestSelectedMatch();
        mLastSnap.reset();
      }
    }
  }

  // if there is no match from the current layer, try to use any editable vector layer
  if ( !m.isValid() && mMode == AllLayers )
  {
    const auto layers = canvas()->layers();
    for ( QgsMapLayer *layer : layers )
    {
      QgsVectorLayer *vlayer = qobject_cast<QgsVectorLayer *>( layer );
      if ( !vlayer )
        continue;

      config.setIndividualLayerSettings( vlayer, QgsSnappingConfig::IndividualLayerSettings(
                                           vlayer->isEditable(), QgsSnappingConfig::VertexAndSegment, tol, QgsTolerance::ProjectUnits ) );
    }

    snapUtils->setConfig( config );
    SelectedMatchFilter filter( tol );
    m = snapUtils->snapToMap( mapPoint, &filter );

    // we give priority to snap matches that are from selected features
    if ( filter.hasSelectedMatch() )
    {
      m = filter.bestSelectedMatch();
      mLastSnap.reset();
    }
  }

  // try to stay snapped to previously used feature
  // so the highlight does not jump around at vertices where features are joined
  if ( mLastSnap )
  {
    OneFeatureFilter filterLast( mLastSnap->layer(), mLastSnap->featureId() );
    QgsPointLocator::Match lastMatch = snapUtils->snapToMap( mapPoint, &filterLast );
    // but skip the previously used feature if it would only snap to segment, while now we have snap to vertex
    // so that if there is a point on a line, it gets priority (as is usual with combined vertex+segment snapping)
    bool matchHasVertexLastHasEdge = m.hasVertex() && lastMatch.hasEdge();
    if ( lastMatch.isValid() && lastMatch.distance() <= m.distance() && !matchHasVertexLastHasEdge )
    {
      m = lastMatch;
    }
  }

  snapUtils->setConfig( oldConfig );

  mLastSnap.reset( new QgsPointLocator::Match( m ) );

  return m;
}

//! Return whether mapPoint is closer to the endpoint marker than to the endpoint vertex itself (within tolerance)
bool QgsVertexTool::isNearEndpointMarker( const QgsPointXY &mapPoint )
{
  if ( !mEndpointMarkerCenter )
    return false;

  double distMarker = std::sqrt( mEndpointMarkerCenter->sqrDist( mapPoint ) );
  double tol = QgsTolerance::vertexSearchRadius( canvas()->mapSettings() );

  QgsGeometry geom = cachedGeometryForVertex( *mMouseAtEndpoint );
  QgsPointXY vertexPointV2 = geom.vertexAt( mMouseAtEndpoint->vertexId );
  QgsPointXY vertexPoint = QgsPointXY( vertexPointV2.x(), vertexPointV2.y() );
  double distVertex = std::sqrt( vertexPoint.sqrDist( mapPoint ) );

  return distMarker < tol && distMarker < distVertex;
}

//! Return whether the match is the first/last vertex of a line geometry
bool QgsVertexTool::isMatchAtEndpoint( const QgsPointLocator::Match &match )
{
  QgsGeometry geom = cachedGeometry( match.layer(), match.featureId() );

  if ( geom.type() != QgsWkbTypes::LineGeometry )
    return false;

  return isEndpointAtVertexIndex( geom, match.vertexIndex() );
}

//! Compute the map position of the "extend line" marker: 15 px beyond the endpoint, along the last segment's direction
QgsPointXY QgsVertexTool::positionForEndpointMarker( const QgsPointLocator::Match &match )
{
  QgsGeometry geom = cachedGeometry( match.layer(), match.featureId() );

  QgsPointXY pt0 = geom.vertexAt( adjacentVertexIndexToEndpoint( geom, match.vertexIndex() ) );
  QgsPointXY pt1 = geom.vertexAt( match.vertexIndex() );

  pt0 = toMapCoordinates( match.layer(), pt0 );
  pt1 = toMapCoordinates( match.layer(), pt1 );

  double dx = pt1.x() - pt0.x();
  double dy = pt1.y() - pt0.y();
  double dist = 15 * canvas()->mapSettings().mapUnitsPerPixel();
  double angle = std::atan2( dy, dx );  // to the top: angle=0, to the right: angle=90, to the left: angle=-90
  double x = pt1.x() + std::cos( angle ) * dist;
  double y = pt1.y() + std::sin( angle ) * dist;

  return QgsPointXY( x, y );
}

void
QgsVertexTool::mouseMoveNotDragging( QgsMapMouseEvent *e )
{
  // hover handler: update all hover feedback (vertex band, endpoint marker,
  // edge band, edge-center marker, feature band) based on the current snap
  if ( mMouseAtEndpoint )
  {
    // check if we are still at the endpoint, i.e. whether to keep showing
    // the endpoint indicator - or go back to snapping to editable layers
    QgsPointXY mapPoint = toMapCoordinates( e->pos() );
    if ( isNearEndpointMarker( mapPoint ) )
    {
      mEndpointMarker->setColor( Qt::red );
      mEndpointMarker->update();
      // make it clear this would add endpoint, not move the vertex
      mVertexBand->setVisible( false );
      return;
    }
  }

  // do not use snap from mouse event, use our own with any editable layer
  QgsPointLocator::Match m = snapToEditableLayer( e );

  mLastMouseMoveMatch = m;

  // possibility to move a vertex
  if ( m.type() == QgsPointLocator::Vertex )
  {
    updateVertexBand( m );

    // if we are at an endpoint, let's show also the endpoint indicator
    // so user can possibly add a new vertex at the end
    if ( isMatchAtEndpoint( m ) )
    {
      mMouseAtEndpoint.reset( new Vertex( m.layer(), m.featureId(), m.vertexIndex() ) );
      mEndpointMarkerCenter.reset( new QgsPointXY( positionForEndpointMarker( m ) ) );
      mEndpointMarker->setCenter( *mEndpointMarkerCenter );
      mEndpointMarker->setColor( Qt::gray );
      mEndpointMarker->setVisible( true );
      mEndpointMarker->update();
    }
    else
    {
      mMouseAtEndpoint.reset();
      mEndpointMarkerCenter.reset();
      mEndpointMarker->setVisible( false );
    }
  }
  else
  {
    mVertexBand->setVisible( false );
    mMouseAtEndpoint.reset();
    mEndpointMarkerCenter.reset();
    mEndpointMarker->setVisible( false );
  }

  // possibility to create new vertex here - or to move the edge
  if ( m.type() == QgsPointLocator::Edge )
  {
    QgsPointXY mapPoint = toMapCoordinates( e->pos() );
    bool isCircularEdge = false;

    QgsPointXY p0, p1;
    m.edgePoints( p0, p1 );

    QgsGeometry geom = cachedGeometry( m.layer(), m.featureId() );
    if ( isCircularVertex( geom, m.vertexIndex() ) )
    {
      // circular edge at the first vertex
      isCircularEdge = true;
      QgsPointXY pX = toMapCoordinates( m.layer(), geom.vertexAt( m.vertexIndex() - 1 ) );
      QgsPointSequence points;
      QgsGeometryUtils::segmentizeArc( QgsPoint( pX ), QgsPoint( p0 ), QgsPoint( p1 ), points );
      mEdgeBand->reset();
      for ( const QgsPoint &pt : qgis::as_const( points ) )
        mEdgeBand->addPoint( pt );
    }
    else if ( isCircularVertex( geom, m.vertexIndex() + 1 ) )
    {
      // circular edge at the second vertex
      isCircularEdge = true;
      QgsPointXY pX = toMapCoordinates( m.layer(), geom.vertexAt( m.vertexIndex() + 2 ) );
      QgsPointSequence points;
      QgsGeometryUtils::segmentizeArc( QgsPoint( p0 ), QgsPoint( p1 ), QgsPoint( pX ), points );
      mEdgeBand->reset();
      for ( const QgsPoint &pt : qgis::as_const( points ) )
        mEdgeBand->addPoint( pt );
    }
    else
    {
      // straight edge
      QgsPolylineXY points;
      points << p0 << p1;
      mEdgeBand->setToGeometry( QgsGeometry::fromPolylineXY( points ), nullptr );
    }

    QgsPointXY edgeCenter;
    bool isNearCenter = matchEdgeCenterTest( m, mapPoint, &edgeCenter );
    mEdgeCenterMarker->setCenter( edgeCenter );
    mEdgeCenterMarker->setColor( isNearCenter ? Qt::red : Qt::gray );
    mEdgeCenterMarker->setVisible( !isCircularEdge );  // currently not supported for circular edges
    mEdgeCenterMarker->update();

    mEdgeBand->setVisible( !isNearCenter );
  }
  else
  {
    mEdgeCenterMarker->setVisible( false );
    mEdgeBand->setVisible( false );
  }

  updateFeatureBand( m );
}

//! Show/hide the hovered-vertex marker; diamond icon marks a circular (curve) vertex
void QgsVertexTool::updateVertexBand( const QgsPointLocator::Match &m )
{
  if ( m.hasVertex() && m.layer() )
  {
    mVertexBand->setToGeometry( QgsGeometry::fromPointXY( m.point() ), nullptr );
    mVertexBand->setVisible( true );
    bool isCircular = false;
    if ( m.layer() )
    {
      isCircular = isCircularVertex( cachedGeometry( m.layer(), m.featureId() ), m.vertexIndex() );
    }

    mVertexBand->setIcon( isCircular ?
QgsRubberBand::ICON_FULL_DIAMOND : QgsRubberBand::ICON_CIRCLE );
  }
  else
  {
    mVertexBand->setVisible( false );
  }
}

//! Update the feature highlight band + vertex markers for the matched feature (or hide them)
void QgsVertexTool::updateFeatureBand( const QgsPointLocator::Match &m )
{
  // highlight feature
  if ( m.isValid() && m.layer() )
  {
    if ( mFeatureBandLayer == m.layer() && mFeatureBandFid == m.featureId() )
      return;  // skip regeneration of rubber band if not needed
    QgsGeometry geom = cachedGeometry( m.layer(), m.featureId() );
    mFeatureBandMarkers->setToGeometry( geometryToMultiPoint( geom ), m.layer() );
    mFeatureBandMarkers->setVisible( true );
    // rubber band cannot draw true curves - segmentize them first
    if ( QgsWkbTypes::isCurvedType( geom.wkbType() ) )
      geom = QgsGeometry( geom.constGet()->segmentize() );
    mFeatureBand->setToGeometry( geom, m.layer() );
    mFeatureBand->setVisible( true );
    mFeatureBandLayer = m.layer();
    mFeatureBandFid = m.featureId();
  }
  else
  {
    mFeatureBand->setVisible( false );
    mFeatureBandMarkers->setVisible( false );
    mFeatureBandLayer = nullptr;
    mFeatureBandFid = QgsFeatureId();
  }
}

//! Keyboard shortcuts: Shift+R range selection, Del/Backspace delete, Esc cancel, , / . cycle vertices
void QgsVertexTool::keyPressEvent( QKeyEvent *e )
{
  if ( !mDraggingVertex && !mDraggingEdge && e->key() == Qt::Key_R && e->modifiers() & Qt::ShiftModifier )
  {
    startRangeVertexSelection();
    return;
  }
  if ( mSelectionMethod == SelectionRange && e->key() == Qt::Key_Escape )
  {
    stopRangeVertexSelection();
    return;
  }

  if ( !mDraggingVertex && mSelectedVertices.count() == 0 )
    return;

  if ( e->key() == Qt::Key_Delete || e->key() == Qt::Key_Backspace )
  {
    e->ignore();  // Override default shortcut management
    deleteVertex();
  }
  else if ( e->key() == Qt::Key_Escape )
  {
    if ( mDraggingVertex )
      stopDragging();
  }
  else if ( e->key() == Qt::Key_Less || e->key() == Qt::Key_Comma )
  {
    highlightAdjacentVertex( -1 );
  }
  else if ( e->key() == Qt::Key_Greater || e->key() == Qt::Key_Period )
  {
    highlightAdjacentVertex( + 1 );
  }
}

/**
 * Return the feature's geometry from the per-layer cache, fetching it (attributes
 * excluded) on first access. The first request for a layer also hooks up signals
 * that keep the cache in sync with edits.
 */
QgsGeometry QgsVertexTool::cachedGeometry( const QgsVectorLayer *layer, QgsFeatureId fid )
{
  if ( !mCache.contains( layer ) )
  {
    connect( layer, &QgsVectorLayer::geometryChanged, this, &QgsVertexTool::onCachedGeometryChanged );
    connect( layer, &QgsVectorLayer::featureDeleted, this, &QgsVertexTool::onCachedGeometryDeleted );
    // TODO: also clear cache when layer is deleted
  }

  QHash<QgsFeatureId, QgsGeometry> &layerCache = mCache[layer];
  if ( !layerCache.contains( fid ) )
  {
    QgsFeature f;
    layer->getFeatures( QgsFeatureRequest( fid ).setSubsetOfAttributes( QgsAttributeList() ) ).nextFeature( f );
    layerCache[fid] = f.geometry();
  }

  return layerCache[fid];
}

//! Convenience wrapper: cached geometry of the feature a Vertex belongs to
QgsGeometry QgsVertexTool::cachedGeometryForVertex( const Vertex &vertex )
{
  return cachedGeometry( vertex.layer, vertex.fid );
}

//! Slot: keep the cache and highlight in sync when a feature's geometry changes
void QgsVertexTool::onCachedGeometryChanged( QgsFeatureId fid, const QgsGeometry &geom )
{
  QgsVectorLayer *layer = qobject_cast<QgsVectorLayer *>( sender() );
  Q_ASSERT( mCache.contains( layer ) );
  QHash<QgsFeatureId, QgsGeometry> &layerCache = mCache[layer];
  if ( layerCache.contains( fid ) )
    layerCache[fid] = geom;

  // refresh highlighted vertices - their position may have changed
  setHighlightedVertices( mSelectedVertices );

  // re-run validation for the feature
  validateGeometry( layer, fid );
}

//! Slot: drop cached geometry and refresh highlight when a feature is deleted
void QgsVertexTool::onCachedGeometryDeleted( QgsFeatureId fid )
{
  QgsVectorLayer *layer = qobject_cast<QgsVectorLayer *>( sender() );
  Q_ASSERT( mCache.contains( layer ) );
  QHash<QgsFeatureId, QgsGeometry> &layerCache = mCache[layer];
  if ( layerCache.contains( fid ) )
    layerCache.remove( fid );

  // refresh highlighted vertices - some may have been deleted
  setHighlightedVertices( mSelectedVertices );
}

//! Open (or refresh) the vertex editor dock for the feature under the mouse
void QgsVertexTool::showVertexEditor()  //#spellok
{
  QgsPointLocator::Match m = mLastMouseMoveMatch;
  if ( !m.isValid() || !m.layer() )
    return;

  mSelectedFeature.reset( new QgsSelectedFeature( m.featureId(), m.layer(), mCanvas ) );
  // carry over any highlighted vertices of this feature into the editor's selection
  for ( int i = 0; i < mSelectedVertices.length(); ++i )
  {
    if ( mSelectedVertices.at( i ).layer == m.layer() && mSelectedVertices.at( i ).fid == m.featureId() )
    {
      mSelectedFeature->selectVertex( mSelectedVertices.at( i ).vertexId );
    }
  }
  if ( !mVertexEditor )
  {
    mVertexEditor.reset( new QgsVertexEditor(
m.layer(), mSelectedFeature.get(), mCanvas ) );
    QgisApp::instance()->addDockWidget( Qt::LeftDockWidgetArea, mVertexEditor.get() );
    connect( mVertexEditor.get(), &QgsVertexEditor::deleteSelectedRequested, this, &QgsVertexTool::deleteVertexEditorSelection );
    connect( mVertexEditor.get(), &QgsVertexEditor::editorClosed, this, &QgsVertexTool::cleanupVertexEditor );
  }
  else
  {
    mVertexEditor->updateEditor( m.layer(), mSelectedFeature.get() );
  }

  connect( mSelectedFeature.get()->layer(), &QgsVectorLayer::featureDeleted, this, &QgsVertexTool::cleanEditor );
}

//! Close the vertex editor and drop the associated selected feature
void QgsVertexTool::cleanupVertexEditor()
{
  mSelectedFeature.reset();
  mVertexEditor.reset();
}

//! Return index of the first selected vertex in the editor's vertex map, or -1 if none
static int _firstSelectedVertex( QgsSelectedFeature &selectedFeature )
{
  QList<QgsVertexEntry *> &vertexMap = selectedFeature.vertexMap();
  for ( int i = 0, n = vertexMap.size(); i < n; ++i )
  {
    if ( vertexMap[i]->isSelected() )
    {
      return i;
    }
  }
  return -1;
}

//! Select a vertex with the index wrapped into the valid range (handles negative values too)
static void _safeSelectVertex( QgsSelectedFeature &selectedFeature, int vertexNr )
{
  int n = selectedFeature.vertexMap().size();
  selectedFeature.selectVertex( ( vertexNr + n ) % n );
}

//! Delete the vertices currently selected in the vertex editor, then select a sensible next vertex
void QgsVertexTool::deleteVertexEditorSelection()
{
  if ( !mSelectedFeature )
    return;

  int firstSelectedIndex = _firstSelectedVertex( *mSelectedFeature );
  if ( firstSelectedIndex == -1 )
    return;

  // make a list of selected vertices
  QList<Vertex> vertices;
  QList<QgsVertexEntry *> &selFeatureVertices = mSelectedFeature->vertexMap();
  QgsVectorLayer *layer = mSelectedFeature->layer();
  QgsFeatureId fid = mSelectedFeature->featureId();
  QgsGeometry geometry = cachedGeometry( layer, fid );
  for ( QgsVertexEntry *vertex : qgis::as_const( selFeatureVertices ) )
  {
    if ( vertex->isSelected() )
    {
      int vertexIndex = geometry.vertexNrFromVertexId( vertex->vertexId() );
      if ( vertexIndex != -1 )
        vertices.append( Vertex( layer, fid, vertexIndex ) );
    }
  }

  // now select the vertices and delete them...
  setHighlightedVertices( vertices );
  deleteVertex();

  if ( !mSelectedFeature->geometry()->isNull() )
  {
    int nextVertexToSelect = firstSelectedIndex;
    if ( mSelectedFeature->geometry()->type() == QgsWkbTypes::LineGeometry )
    {
      // for lines we don't wrap around vertex selection when deleting vertices from end of line
      nextVertexToSelect = std::min( nextVertexToSelect, mSelectedFeature->geometry()->constGet()->nCoordinates() - 1 );
    }

    _safeSelectVertex( *mSelectedFeature, nextVertexToSelect );
  }
  mSelectedFeature->layer()->triggerRepaint();
}

//! Start the appropriate drag interaction (extend endpoint, add vertex, move edge, or move vertex)
void QgsVertexTool::startDragging( QgsMapMouseEvent *e )
{
  QgsPointXY mapPoint = toMapCoordinates( e->pos() );
  if ( isNearEndpointMarker( mapPoint ) )
  {
    startDraggingAddVertexAtEndpoint( mapPoint );
    return;
  }

  QgsPointLocator::Match m = snapToEditableLayer( e );
  if ( !m.isValid() )
    return;

  // activate advanced digitizing dock
  setAdvancedDigitizingAllowed( true );

  // adding a new vertex instead of moving a vertex
  if ( m.hasEdge() )
  {
    // only start dragging if we are near edge center
    mapPoint = toMapCoordinates( e->pos() );
    bool isNearCenter = matchEdgeCenterTest( m, mapPoint );
    if ( isNearCenter )
      startDraggingAddVertex( m );
    else
      startDraggingEdge( m, mapPoint );
  }
  else  // vertex
  {
    startDraggingMoveVertex( m );
  }
}

/**
 * Begin moving the matched vertex. Also picks up any other highlighted vertices
 * (with their offsets) and, with topological editing enabled, coincident vertices
 * from other editable layers, so they all move together.
 */
void QgsVertexTool::startDraggingMoveVertex( const QgsPointLocator::Match &m )
{
  Q_ASSERT( m.hasVertex() );

  QgsGeometry geom = cachedGeometry( m.layer(), m.featureId() );

  // start dragging of snapped point of current layer
  mDraggingVertex.reset( new Vertex( m.layer(), m.featureId(), m.vertexIndex() ) );
  mDraggingVertexType = MovingVertex;
  mDraggingExtraVertices.clear();
  mDraggingExtraVerticesOffset.clear();

  setHighlightedVerticesVisible( false );  // hide any extra highlight of vertices until we are done with moving

  QgsPointXY origDraggingVertexPoint = geom.vertexAt( mDraggingVertex->vertexId );

  // if there are other highlighted vertices, they should be dragged as well with their offset
  for ( const Vertex &v : qgis::as_const(
mSelectedVertices ) )
  {
    if ( v != *mDraggingVertex )
    {
      QgsPointXY origPointV = cachedGeometryForVertex( v ).vertexAt( v.vertexId );
      QgsPointXY origPointLayer = origDraggingVertexPoint;
      if ( v.layer->crs() != mDraggingVertex->layer->crs() )  // reproject if necessary
        origPointLayer = toLayerCoordinates( v.layer, toMapCoordinates( m.layer(), origDraggingVertexPoint ) );
      QgsVector offset = origPointV - origPointLayer;

      mDraggingExtraVertices << v;
      mDraggingExtraVerticesOffset << offset;
    }
  }

  cadDockWidget()->setPoints( QList<QgsPointXY>() << m.point() << m.point() );

  if ( QgsProject::instance()->topologicalEditing() )
  {
    // support for topo editing - find extra features
    // that have coincident point with the vertex being dragged
    const auto layers = canvas()->layers();
    for ( QgsMapLayer *layer : layers )
    {
      QgsVectorLayer *vlayer = qobject_cast<QgsVectorLayer *>( layer );
      if ( !vlayer || !vlayer->isEditable() )
        continue;

      const auto snappedVertices = layerVerticesSnappedToPoint( vlayer, m.point() );
      for ( const QgsPointLocator::Match &otherMatch : snappedVertices )
      {
        // skip the vertex that is already being dragged
        if ( otherMatch.layer() == m.layer() &&
             otherMatch.featureId() == m.featureId() &&
             otherMatch.vertexIndex() == m.vertexIndex() )
          continue;

        // start dragging of snapped point of current layer
        mDraggingExtraVertices << Vertex( otherMatch.layer(), otherMatch.featureId(), otherMatch.vertexIndex() );
        mDraggingExtraVerticesOffset << QgsVector();  // topo vertices have the same position
      }
    }
  }

  // now build drag rubber bands for extra vertices
  QSet<Vertex> movingVertices;
  movingVertices << *mDraggingVertex;
  for ( const Vertex &v : qgis::as_const( mDraggingExtraVertices ) )
    movingVertices << v;

  QgsPointXY dragVertexMapPoint = m.point();

  buildDragBandsForVertices( movingVertices, dragVertexMapPoint );
}

/**
 * Create drag rubber bands (straight or circular) for every segment adjacent to
 * the given set of moving vertices, plus standalone markers for isolated points.
 * The bookkeeping sets ensure each segment gets at most one band.
 */
void QgsVertexTool::buildDragBandsForVertices( const QSet<Vertex> &movingVertices, const QgsPointXY &dragVertexMapPoint )
{
  QSet<Vertex> verticesInStraightBands;  // always the vertex with lower index

  // set of middle vertices that are already in a circular rubber band
  // i.e. every circular band is defined by its middle circular vertex
  QSet<Vertex> verticesInCircularBands;

  for ( const Vertex &v : qgis::as_const( movingVertices ) )
  {
    int v0idx, v1idx;
    QgsGeometry geom = cachedGeometry( v.layer, v.fid );
    QgsPointXY pt = geom.vertexAt( v.vertexId );

    geom.adjacentVertices( v.vertexId, v0idx, v1idx );

    if ( v0idx != -1 && v1idx != -1 && isCircularVertex( geom, v.vertexId ) )
    {
      // the vertex is in the middle of a curved segment
      if ( !verticesInCircularBands.contains( v ) )
      {
        addDragCircularBand( v.layer,
                             geom.vertexAt( v0idx ),
                             pt,
                             geom.vertexAt( v1idx ),
                             movingVertices.contains( Vertex( v.layer, v.fid, v0idx ) ),
                             true,
                             movingVertices.contains( Vertex( v.layer, v.fid, v1idx ) ),
                             dragVertexMapPoint );
        verticesInCircularBands << v;
      }

      // skip the rest - no need for further straight or circular bands for this vertex
      // because our circular rubber band spans both towards left and right
      continue;
    }

    if ( v0idx != -1 )
    {
      // there is another vertex to the left - let's build a rubber band for it
      Vertex v0( v.layer, v.fid, v0idx );
      if ( isCircularVertex( geom, v0idx ) )
      {
        // circular segment to the left
        if ( !verticesInCircularBands.contains( v0 ) )
        {
          addDragCircularBand( v.layer,
                               geom.vertexAt( v0idx - 1 ),
                               geom.vertexAt( v0idx ),
                               pt,
                               movingVertices.contains( Vertex( v.layer, v.fid, v0idx - 1 ) ),
                               movingVertices.contains( Vertex( v.layer, v.fid, v0idx ) ),
                               true,
                               dragVertexMapPoint );
          verticesInCircularBands << v0;
        }
      }
      else
      {
        // straight segment to the left
        if ( !verticesInStraightBands.contains( v0 ) )
        {
          addDragStraightBand( v.layer,
                               geom.vertexAt( v0idx ),
                               pt,
                               movingVertices.contains( v0 ),
                               true,
                               dragVertexMapPoint );
          verticesInStraightBands << v0;
        }
      }
    }

    if ( v1idx != -1 )
    {
      // there is another vertex to the right - let's build a rubber band for it
      Vertex v1( v.layer, v.fid, v1idx );
      if ( isCircularVertex( geom, v1idx ) )
      {
        // circular segment to the right
        if ( !verticesInCircularBands.contains( v1 ) )
        {
          addDragCircularBand( v.layer,
                               pt,
                               geom.vertexAt( v1idx ),
                               geom.vertexAt( v1idx + 1 ),
                               true,
                               movingVertices.contains( v1 ),
                               movingVertices.contains( Vertex( v.layer, v.fid, v1idx + 1 ) ),
                               dragVertexMapPoint );
          verticesInCircularBands << v1;
        }
      }
      else
      {
        // straight segment to the right
        if ( !verticesInStraightBands.contains( v ) )
        {
          addDragStraightBand( v.layer,
                               pt,
                               geom.vertexAt( v1idx ),
                               true,
                               movingVertices.contains( v1 ),
                               dragVertexMapPoint );
          verticesInStraightBands << v;
        }
      }
    }

    if ( v0idx == -1 && v1idx == -1 )
    {
      // this is a standalone point - we need to use a marker for it
      // to give some feedback to the user
      QgsPointXY ptMapPoint = toMapCoordinates( v.layer, pt );
      QgsVertexMarker *marker = new QgsVertexMarker( mCanvas );
      marker->setIconType( QgsVertexMarker::ICON_X );
      marker->setColor( Qt::red );
      marker->setPenWidth( 3 );
      marker->setVisible( true );
      marker->setCenter( ptMapPoint );
      mDragPointMarkers << marker;
      mDragPointMarkersOffset << ( ptMapPoint - dragVertexMapPoint );
    }
  }
}

//! Return all of the layer's vertices located exactly at mapPoint (exact matches only)
QList<QgsPointLocator::Match> QgsVertexTool::layerVerticesSnappedToPoint( QgsVectorLayer *layer, const QgsPointXY &mapPoint )
{
  MatchCollectingFilter myfilter( this );
  QgsPointLocator *loc = canvas()->snappingUtils()->locatorForLayer( layer );
  loc->nearestVertex( mapPoint, 0, &myfilter );
  return myfilter.matches;
}

//! Begin adding a vertex on the matched edge; bands are drawn to both edge endpoints
void QgsVertexTool::startDraggingAddVertex( const QgsPointLocator::Match &m )
{
  Q_ASSERT( m.hasEdge() );

  // activate advanced digitizing dock
  setAdvancedDigitizingAllowed( true );

  mDraggingVertex.reset( new Vertex( m.layer(), m.featureId(), m.vertexIndex() + 1 ) );
  mDraggingVertexType = AddingVertex;
  mDraggingExtraVertices.clear();
  mDraggingExtraVerticesOffset.clear();

  QgsGeometry geom = cachedGeometry( m.layer(), m.featureId() );

  // TODO: handles rings correctly?
QgsPointXY v0 = geom.vertexAt( m.vertexIndex() ); QgsPointXY v1 = geom.vertexAt( m.vertexIndex() + 1 ); QgsPointXY map_v0 = toMapCoordinates( m.layer(), v0 ); QgsPointXY map_v1 = toMapCoordinates( m.layer(), v1 ); if ( v0.x() != 0 || v0.y() != 0 ) addDragBand( map_v0, m.point() ); if ( v1.x() != 0 || v1.y() != 0 ) addDragBand( map_v1, m.point() ); cadDockWidget()->setPoints( QList<QgsPointXY>() << m.point() << m.point() ); } void QgsVertexTool::startDraggingAddVertexAtEndpoint( const QgsPointXY &mapPoint ) { Q_ASSERT( mMouseAtEndpoint ); // activate advanced digitizing dock setAdvancedDigitizingAllowed( true ); mDraggingVertex.reset( new Vertex( mMouseAtEndpoint->layer, mMouseAtEndpoint->fid, mMouseAtEndpoint->vertexId ) ); mDraggingVertexType = AddingEndpoint; mDraggingExtraVertices.clear(); mDraggingExtraVerticesOffset.clear(); QgsGeometry geom = cachedGeometry( mMouseAtEndpoint->layer, mMouseAtEndpoint->fid ); QgsPointXY v0 = geom.vertexAt( mMouseAtEndpoint->vertexId ); QgsPointXY map_v0 = toMapCoordinates( mMouseAtEndpoint->layer, v0 ); addDragBand( map_v0, mapPoint ); // setup CAD dock previous points to endpoint and the previous point QgsPointXY pt0 = geom.vertexAt( adjacentVertexIndexToEndpoint( geom, mMouseAtEndpoint->vertexId ) ); QgsPointXY pt1 = geom.vertexAt( mMouseAtEndpoint->vertexId ); cadDockWidget()->setPoints( QList<QgsPointXY>() << pt0 << pt1 << pt1 ); } void QgsVertexTool::startDraggingEdge( const QgsPointLocator::Match &m, const QgsPointXY &mapPoint ) { Q_ASSERT( m.hasEdge() ); // activate advanced digitizing setAdvancedDigitizingAllowed( true ); mDraggingEdge = true; mDraggingExtraVertices.clear(); mDraggingExtraVerticesOffset.clear(); QgsGeometry geom = cachedGeometry( m.layer(), m.featureId() ); QSet<Vertex> movingVertices; movingVertices << Vertex( m.layer(), m.featureId(), m.vertexIndex() ); movingVertices << Vertex( m.layer(), m.featureId(), m.vertexIndex() + 1 ); // add an extra vertex if it is circular edge - so that we move the whole 
edge and not just one part of it if ( isCircularVertex( geom, m.vertexIndex() ) ) { movingVertices << Vertex( m.layer(), m.featureId(), m.vertexIndex() - 1 ); } else if ( isCircularVertex( geom, m.vertexIndex() + 1 ) ) { movingVertices << Vertex( m.layer(), m.featureId(), m.vertexIndex() + 2 ); } buildDragBandsForVertices( movingVertices, mapPoint ); QgsPointXY layerPoint = toLayerCoordinates( m.layer(), mapPoint ); for ( const Vertex &v : qgis::as_const( movingVertices ) ) { mDraggingExtraVertices << v; mDraggingExtraVerticesOffset << ( geom.vertexAt( v.vertexId ) - QgsPoint( layerPoint ) ); } cadDockWidget()->setPoints( QList<QgsPointXY>() << m.point() << m.point() ); } void QgsVertexTool::stopDragging() { // deactivate advanced digitizing setAdvancedDigitizingAllowed( false ); cadDockWidget()->clear(); // clear cad points and release locks mDraggingVertex.reset(); mDraggingVertexType = NotDragging; mDraggingEdge = false; clearDragBands(); setHighlightedVerticesVisible( true ); // highlight can be shown again mSnapIndicator->setMatch( QgsPointLocator::Match() ); } QgsPointXY QgsVertexTool::matchToLayerPoint( const QgsVectorLayer *destLayer, const QgsPointXY &mapPoint, const QgsPointLocator::Match *match ) { // try to use point coordinates in the original CRS if it is the same if ( match && match->hasVertex() && match->layer() && match->layer()->crs() == destLayer->crs() ) { QgsFeature f; QgsFeatureIterator fi = match->layer()->getFeatures( QgsFeatureRequest( match->featureId() ).setSubsetOfAttributes( QgsAttributeList() ) ); if ( fi.nextFeature( f ) ) return f.geometry().vertexAt( match->vertexIndex() ); } // fall back to reprojection of the map point to layer point if they are not the same CRS return toLayerCoordinates( destLayer, mapPoint ); } void QgsVertexTool::moveEdge( const QgsPointXY &mapPoint ) { stopDragging(); VertexEdits edits; addExtraVerticesToEdits( edits, mapPoint ); applyEditsToLayers( edits ); } void QgsVertexTool::moveVertex( const QgsPointXY 
&mapPoint, const QgsPointLocator::Match *mapPointMatch ) { // deactivate advanced digitizing setAdvancedDigitizingAllowed( false ); QgsVectorLayer *dragLayer = mDraggingVertex->layer; QgsFeatureId dragFid = mDraggingVertex->fid; int dragVertexId = mDraggingVertex->vertexId; bool addingVertex = mDraggingVertexType == AddingVertex || mDraggingVertexType == AddingEndpoint; bool addingAtEndpoint = mDraggingVertexType == AddingEndpoint; QgsGeometry geom = cachedGeometryForVertex( *mDraggingVertex ); stopDragging(); QgsPointXY layerPoint = matchToLayerPoint( dragLayer, mapPoint, mapPointMatch ); <|fim▁hole|> if ( !geom.vertexIdFromVertexNr( dragVertexId, vid ) ) { QgsDebugMsg( "invalid vertex index" ); return; } QgsAbstractGeometry *geomTmp = geom.constGet()->clone(); // add/move vertex if ( addingVertex ) { if ( addingAtEndpoint && vid.vertex != 0 ) // appending? vid.vertex++; QgsPoint pt( layerPoint ); if ( QgsWkbTypes::hasZ( dragLayer->wkbType() ) ) pt.addZValue( defaultZValue() ); if ( !geomTmp->insertVertex( vid, pt ) ) { QgsDebugMsg( "append vertex failed!" ); return; } } else { if ( !geomTmp->moveVertex( vid, QgsPoint( layerPoint ) ) ) { QgsDebugMsg( "move vertex failed!" ); return; } } geom.set( geomTmp ); VertexEdits edits; // dict { layer : { fid : geom } } edits[dragLayer][dragFid] = geom; addExtraVerticesToEdits( edits, mapPoint, dragLayer, layerPoint ); applyEditsToLayers( edits ); if ( QgsProject::instance()->topologicalEditing() && mapPointMatch->hasEdge() && mapPointMatch->layer() ) { // topo editing: add vertex to existing segments when moving/adding a vertex to such segment. 
// this requires that the snapping match is to a segment and the segment layer's CRS // is the same (otherwise we would need to reproject the point and it will not be coincident) const auto editKeys = edits.keys(); for ( QgsVectorLayer *layer : editKeys ) { if ( layer->crs() == mapPointMatch->layer()->crs() ) { layer->addTopologicalPoints( layerPoint ); } } } setHighlightedVertices( mSelectedVertices ); // update positions of existing highlighted vertices setHighlightedVerticesVisible( true ); // time to show highlighted vertices again } void QgsVertexTool::addExtraVerticesToEdits( QgsVertexTool::VertexEdits &edits, const QgsPointXY &mapPoint, QgsVectorLayer *dragLayer, const QgsPointXY &layerPoint ) { Q_ASSERT( mDraggingExtraVertices.count() == mDraggingExtraVerticesOffset.count() ); // add moved vertices from other layers for ( int i = 0; i < mDraggingExtraVertices.count(); ++i ) { const Vertex &topo = mDraggingExtraVertices[i]; const QgsVector &offset = mDraggingExtraVerticesOffset[i]; QHash<QgsFeatureId, QgsGeometry> &layerEdits = edits[topo.layer]; QgsGeometry topoGeom; if ( layerEdits.contains( topo.fid ) ) topoGeom = QgsGeometry( edits[topo.layer][topo.fid] ); else topoGeom = QgsGeometry( cachedGeometryForVertex( topo ) ); QgsPointXY point; if ( dragLayer && topo.layer->crs() == dragLayer->crs() ) point = layerPoint; // this point may come from exact match so it may be more precise else point = toLayerCoordinates( topo.layer, mapPoint ); if ( offset.x() || offset.y() ) { point += offset; } if ( !topoGeom.moveVertex( point.x(), point.y(), topo.vertexId ) ) { QgsDebugMsg( "[topo] move vertex failed!" 
); continue; } edits[topo.layer][topo.fid] = topoGeom; } } void QgsVertexTool::applyEditsToLayers( QgsVertexTool::VertexEdits &edits ) { QHash<QgsVectorLayer *, QHash<QgsFeatureId, QgsGeometry> >::iterator it = edits.begin(); for ( ; it != edits.end(); ++it ) { QgsVectorLayer *layer = it.key(); QHash<QgsFeatureId, QgsGeometry> &layerEdits = it.value(); layer->beginEditCommand( tr( "Moved vertex" ) ); QHash<QgsFeatureId, QgsGeometry>::iterator it2 = layerEdits.begin(); for ( ; it2 != layerEdits.end(); ++it2 ) layer->changeGeometry( it2.key(), it2.value() ); layer->endEditCommand(); layer->triggerRepaint(); } } void QgsVertexTool::deleteVertex() { QSet<Vertex> toDelete; if ( !mSelectedVertices.isEmpty() ) { toDelete = QSet<Vertex>::fromList( mSelectedVertices ); } else { bool addingVertex = mDraggingVertexType == AddingVertex || mDraggingVertexType == AddingEndpoint; toDelete << *mDraggingVertex; toDelete += QSet<Vertex>::fromList( mDraggingExtraVertices ); if ( addingVertex ) { stopDragging(); return; // just cancel the vertex } } stopDragging(); setHighlightedVertices( QList<Vertex>() ); // reset selection if ( QgsProject::instance()->topologicalEditing() ) { // if topo editing is enabled, delete all the vertices that are on the same location QSet<Vertex> topoVerticesToDelete; for ( const Vertex &vertexToDelete : qgis::as_const( toDelete ) ) { QgsPointXY layerPt = cachedGeometryForVertex( vertexToDelete ).vertexAt( vertexToDelete.vertexId ); QgsPointXY mapPt = toMapCoordinates( vertexToDelete.layer, layerPt ); const auto snappedVertices = layerVerticesSnappedToPoint( vertexToDelete.layer, mapPt ); for ( const QgsPointLocator::Match &otherMatch : snappedVertices ) { Vertex otherVertex( otherMatch.layer(), otherMatch.featureId(), otherMatch.vertexIndex() ); if ( toDelete.contains( otherVertex ) || topoVerticesToDelete.contains( otherVertex ) ) continue; topoVerticesToDelete.insert( otherVertex ); } } toDelete.unite( topoVerticesToDelete ); } // switch from a plain 
list to dictionary { layer: { fid: [vertexNr1, vertexNr2, ...] } } QHash<QgsVectorLayer *, QHash<QgsFeatureId, QList<int> > > toDeleteGrouped; for ( const Vertex &vertex : qgis::as_const( toDelete ) ) { toDeleteGrouped[vertex.layer][vertex.fid].append( vertex.vertexId ); } // de-duplicate vertices in linear rings - if there is the first vertex selected, // then also the last vertex will be selected - but we want just one out of the pair QHash<QgsVectorLayer *, QHash<QgsFeatureId, QList<int> > >::iterator itX = toDeleteGrouped.begin(); for ( ; itX != toDeleteGrouped.end(); ++itX ) { QgsVectorLayer *layer = itX.key(); QHash<QgsFeatureId, QList<int> > &featuresDict = itX.value(); QHash<QgsFeatureId, QList<int> >::iterator it2 = featuresDict.begin(); for ( ; it2 != featuresDict.end(); ++it2 ) { QgsFeatureId fid = it2.key(); QList<int> &vertexIds = it2.value(); if ( vertexIds.count() >= 2 && layer->geometryType() == QgsWkbTypes::PolygonGeometry ) { QSet<int> duplicateVertexIndices; QgsGeometry geom = cachedGeometry( layer, fid ); for ( int i = 0; i < vertexIds.count(); ++i ) { QgsVertexId vid; if ( geom.vertexIdFromVertexNr( vertexIds[i], vid ) ) { int ringVertexCount = geom.constGet()->vertexCount( vid.part, vid.ring ); if ( vid.vertex == ringVertexCount - 1 ) { // this is the last vertex of the ring - remove the first vertex from the list duplicateVertexIndices << geom.vertexNrFromVertexId( QgsVertexId( vid.part, vid.ring, 0 ) ); } } } // now delete the duplicities for ( int duplicateVertexIndex : qgis::as_const( duplicateVertexIndices ) ) vertexIds.removeOne( duplicateVertexIndex ); } } } // main for cycle to delete all selected vertices QHash<QgsVectorLayer *, QHash<QgsFeatureId, QList<int> > >::iterator it = toDeleteGrouped.begin(); for ( ; it != toDeleteGrouped.end(); ++it ) { QgsVectorLayer *layer = it.key(); QHash<QgsFeatureId, QList<int> > &featuresDict = it.value(); layer->beginEditCommand( tr( "Deleted vertex" ) ); bool success = true; QHash<QgsFeatureId, 
QList<int> >::iterator it2 = featuresDict.begin(); for ( ; it2 != featuresDict.end(); ++it2 ) { QgsFeatureId fid = it2.key(); QList<int> &vertexIds = it2.value(); bool res = QgsVectorLayer::Success; std::sort( vertexIds.begin(), vertexIds.end(), std::greater<int>() ); for ( int vertexId : vertexIds ) { if ( res != QgsVectorLayer::EmptyGeometry ) res = layer->deleteVertex( fid, vertexId ); if ( res != QgsVectorLayer::EmptyGeometry && res != QgsVectorLayer::Success ) { QgsDebugMsg( QString( "failed to delete vertex %1 %2 %3!" ).arg( layer->name() ).arg( fid ).arg( vertexId ) ); success = false; } } if ( res == QgsVectorLayer::EmptyGeometry ) { emit messageEmitted( tr( "Geometry has been cleared. Use the add part tool to set geometry for this feature." ) ); } } if ( success ) { layer->endEditCommand(); layer->triggerRepaint(); } else layer->destroyEditCommand(); } // make sure the temporary feature rubber band is not visible removeTemporaryRubberBands(); // pre-select next vertex for deletion if we are deleting just one vertex if ( toDelete.count() == 1 ) { const Vertex &vertex = *toDelete.constBegin(); QgsGeometry geom( cachedGeometryForVertex( vertex ) ); int vertexId = vertex.vertexId; // if next vertex is not available, use the previous one if ( geom.vertexAt( vertexId ) == QgsPoint() ) vertexId -= 1; if ( geom.vertexAt( vertexId ) != QgsPoint() ) { QList<Vertex> vertices_new; vertices_new << Vertex( vertex.layer, vertex.fid, vertexId ); setHighlightedVertices( vertices_new ); } } } void QgsVertexTool::setHighlightedVertices( const QList<Vertex> &listVertices, HighlightMode mode ) { if ( mode == ModeReset ) { qDeleteAll( mSelectedVerticesMarkers ); mSelectedVerticesMarkers.clear(); mSelectedVertices.clear(); } else if ( mode == ModeSubtract ) { // need to clear vertex markers, and rebuild later. 
We have no way to link // a marker to a vertex in order to remove one-by-one qDeleteAll( mSelectedVerticesMarkers ); mSelectedVerticesMarkers.clear(); } auto createMarkerForVertex = [ = ]( const Vertex & vertex )->bool { QgsGeometry geom = cachedGeometryForVertex( vertex ); QgsVertexId vid; if ( !geom.vertexIdFromVertexNr( vertex.vertexId, vid ) ) return false; // vertex may not exist anymore QgsVertexMarker *marker = new QgsVertexMarker( canvas() ); marker->setIconType( QgsVertexMarker::ICON_CIRCLE ); marker->setPenWidth( 3 ); marker->setColor( Qt::blue ); marker->setFillColor( Qt::blue ); marker->setCenter( toMapCoordinates( vertex.layer, geom.vertexAt( vertex.vertexId ) ) ); mSelectedVerticesMarkers.append( marker ); return true; }; for ( const Vertex &vertex : listVertices ) { if ( mode == ModeAdd && mSelectedVertices.contains( vertex ) ) { continue; } else if ( mode == ModeSubtract ) { mSelectedVertices.removeAll( vertex ); continue; } if ( !createMarkerForVertex( vertex ) ) continue; // vertex may not exist anymore mSelectedVertices.append( vertex ); } if ( mode == ModeSubtract ) { // rebuild markers for remaining selection for ( const Vertex &vertex : qgis::as_const( mSelectedVertices ) ) { createMarkerForVertex( vertex ); } } } void QgsVertexTool::setHighlightedVerticesVisible( bool visible ) { for ( QgsVertexMarker *marker : qgis::as_const( mSelectedVerticesMarkers ) ) marker->setVisible( visible ); } void QgsVertexTool::highlightAdjacentVertex( double offset ) { if ( mSelectedVertices.isEmpty() ) return; Vertex vertex = mSelectedVertices[0]; // simply use the first one QgsGeometry geom = cachedGeometryForVertex( vertex ); // try to wrap around polygon rings int newVertexId, v0idx, v1idx; geom.adjacentVertices( vertex.vertexId, v0idx, v1idx ); if ( offset == -1 && v0idx != -1 ) newVertexId = v0idx; else if ( offset == 1 && v1idx != -1 ) newVertexId = v1idx; else newVertexId = vertex.vertexId + offset; QgsPointXY pt = geom.vertexAt( newVertexId ); if ( pt 
!= QgsPointXY() ) vertex = Vertex( vertex.layer, vertex.fid, newVertexId ); setHighlightedVertices( QList<Vertex>() << vertex ); zoomToVertex( vertex ); // make sure the vertex is visible } void QgsVertexTool::startSelectionRect( const QPoint &point0 ) { Q_ASSERT( !mSelectionRect ); mSelectionRect.reset( new QRect() ); mSelectionRect->setTopLeft( point0 ); mSelectionRectItem = new QRubberBand( QRubberBand::Rectangle, canvas() ); } void QgsVertexTool::updateSelectionRect( const QPoint &point1 ) { Q_ASSERT( mSelectionRect ); mSelectionRect->setBottomRight( point1 ); mSelectionRectItem->setGeometry( mSelectionRect->normalized() ); mSelectionRectItem->show(); } void QgsVertexTool::stopSelectionRect() { Q_ASSERT( mSelectionRect ); mSelectionRectItem->deleteLater(); mSelectionRectItem = nullptr; mSelectionRect.reset(); } bool QgsVertexTool::matchEdgeCenterTest( const QgsPointLocator::Match &m, const QgsPointXY &mapPoint, QgsPointXY *edgeCenterPtr ) { QgsPointXY p0, p1; m.edgePoints( p0, p1 ); QgsGeometry geom = cachedGeometry( m.layer(), m.featureId() ); if ( isCircularVertex( geom, m.vertexIndex() ) || isCircularVertex( geom, m.vertexIndex() + 1 ) ) return false; // currently not supported for circular edges QgsRectangle visible_extent = canvas()->mapSettings().visibleExtent(); if ( !visible_extent.contains( p0 ) || !visible_extent.contains( p1 ) ) { // clip line segment to the extent so the mid-point marker is always visible QgsGeometry extentGeom = QgsGeometry::fromRect( visible_extent ); QgsGeometry lineGeom = QgsGeometry::fromPolylineXY( QgsPolylineXY() << p0 << p1 ); lineGeom = extentGeom.intersection( lineGeom ); QgsPolylineXY polyline = lineGeom.asPolyline(); Q_ASSERT( polyline.count() == 2 ); p0 = polyline[0]; p1 = polyline[1]; } QgsPointXY edgeCenter( ( p0.x() + p1.x() ) / 2, ( p0.y() + p1.y() ) / 2 ); if ( edgeCenterPtr ) *edgeCenterPtr = edgeCenter; double distFromEdgeCenter = std::sqrt( mapPoint.sqrDist( edgeCenter ) ); double tol = 
QgsTolerance::vertexSearchRadius( canvas()->mapSettings() ); bool isNearCenter = distFromEdgeCenter < tol; return isNearCenter; } void QgsVertexTool::CircularBand::updateRubberBand( const QgsPointXY &mapPoint ) { QgsPointSequence points; QgsPointXY v0 = moving0 ? mapPoint + offset0 : p0; QgsPointXY v1 = moving1 ? mapPoint + offset1 : p1; QgsPointXY v2 = moving2 ? mapPoint + offset2 : p2; QgsGeometryUtils::segmentizeArc( QgsPoint( v0 ), QgsPoint( v1 ), QgsPoint( v2 ), points ); // it would be useful to have QgsRubberBand::setPoints() call band->reset(); for ( const QgsPoint &p : qgis::as_const( points ) ) band->addPoint( p ); } void QgsVertexTool::validationErrorFound( const QgsGeometry::Error &e ) { QgsGeometryValidator *validator = qobject_cast<QgsGeometryValidator *>( sender() ); if ( !validator ) return; QHash< QPair<QgsVectorLayer *, QgsFeatureId>, GeometryValidation>::iterator it = mValidations.begin(); for ( ; it != mValidations.end(); ++it ) { GeometryValidation &validation = *it; if ( validation.validator == validator ) { validation.addError( e ); break; } } } void QgsVertexTool::validationFinished() { QgsGeometryValidator *validator = qobject_cast<QgsGeometryValidator *>( sender() ); if ( !validator ) return; QHash< QPair<QgsVectorLayer *, QgsFeatureId>, GeometryValidation>::iterator it = mValidations.begin(); for ( ; it != mValidations.end(); ++it ) { GeometryValidation &validation = *it; if ( validation.validator == validator ) { QgsStatusBar *sb = QgisApp::instance()->statusBarIface(); sb->showMessage( tr( "Validation finished (%n error(s) found).", "number of geometry errors", validation.errorMarkers.size() ) ); if ( validation.errorMarkers.isEmpty() ) { // not needed anymore (no markers to keep displayed) validation.cleanup(); mValidations.remove( it.key() ); } break; } } } void QgsVertexTool::GeometryValidation::start( QgsGeometry &geom, QgsVertexTool *t, QgsVectorLayer *l ) { tool = t; layer = l; QgsGeometry::ValidationMethod method = 
QgsGeometry::ValidatorQgisInternal; QgsSettings settings; if ( settings.value( QStringLiteral( "qgis/digitizing/validate_geometries" ), 1 ).toInt() == 2 ) method = QgsGeometry::ValidatorGeos; validator = new QgsGeometryValidator( geom, nullptr, method ); connect( validator, &QgsGeometryValidator::errorFound, tool, &QgsVertexTool::validationErrorFound ); connect( validator, &QThread::finished, tool, &QgsVertexTool::validationFinished ); validator->start(); } void QgsVertexTool::GeometryValidation::addError( QgsGeometry::Error e ) { if ( !errors.isEmpty() ) errors += '\n'; errors += e.what(); if ( e.hasWhere() ) { QgsVertexMarker *marker = new QgsVertexMarker( tool->canvas() ); marker->setCenter( tool->canvas()->mapSettings().layerToMapCoordinates( layer, e.where() ) ); marker->setIconType( QgsVertexMarker::ICON_X ); marker->setColor( Qt::green ); marker->setZValue( marker->zValue() + 1 ); marker->setPenWidth( 2 ); marker->setToolTip( e.what() ); errorMarkers << marker; } QgsStatusBar *sb = QgisApp::instance()->statusBarIface(); sb->showMessage( e.what() ); sb->setToolTip( errors ); } void QgsVertexTool::GeometryValidation::cleanup() { if ( validator ) { validator->stop(); validator->wait(); validator->deleteLater(); validator = nullptr; } qDeleteAll( errorMarkers ); errorMarkers.clear(); } void QgsVertexTool::validateGeometry( QgsVectorLayer *layer, QgsFeatureId featureId ) { QgsSettings settings; if ( settings.value( QStringLiteral( "qgis/digitizing/validate_geometries" ), 1 ).toInt() == 0 ) return; QPair<QgsVectorLayer *, QgsFeatureId> id( layer, featureId ); if ( mValidations.contains( id ) ) { mValidations[id].cleanup(); mValidations.remove( id ); } GeometryValidation validation; QgsGeometry geom = cachedGeometry( layer, featureId ); validation.start( geom, this, layer ); mValidations.insert( id, validation ); } void QgsVertexTool::zoomToVertex( const Vertex &vertex ) { QgsPointXY newCenter = cachedGeometryForVertex( vertex ).vertexAt( vertex.vertexId ); 
QgsPointXY mapPoint = mCanvas->mapSettings().layerToMapCoordinates( vertex.layer, newCenter ); QPolygonF ext = mCanvas->mapSettings().visiblePolygon(); if ( !ext.containsPoint( mapPoint.toQPointF(), Qt::OddEvenFill ) ) { mCanvas->setCenter( mapPoint ); mCanvas->refresh(); } } QList<Vertex> QgsVertexTool::verticesInRange( QgsVectorLayer *layer, QgsFeatureId fid, int vertexId0, int vertexId1, bool longWay ) { QgsGeometry geom = cachedGeometry( layer, fid ); if ( vertexId0 > vertexId1 ) std::swap( vertexId0, vertexId1 ); // check it is the same part and ring QgsVertexId vid0, vid1; geom.vertexIdFromVertexNr( vertexId0, vid0 ); geom.vertexIdFromVertexNr( vertexId1, vid1 ); if ( vid0.part != vid1.part || vid0.ring != vid1.ring ) return QList<Vertex>(); // check whether we are in a linear ring int vertexIdTmp = vertexId0 - 1; QgsVertexId vidTmp; while ( geom.vertexIdFromVertexNr( vertexIdTmp, vidTmp ) && vidTmp.part == vid0.part && vidTmp.ring == vid0.ring ) --vertexIdTmp; int startVertexIndex = vertexIdTmp + 1; vertexIdTmp = vertexId1 + 1; while ( geom.vertexIdFromVertexNr( vertexIdTmp, vidTmp ) && vidTmp.part == vid0.part && vidTmp.ring == vid0.ring ) ++vertexIdTmp; int endVertexIndex = vertexIdTmp - 1; QList<Vertex> lst; if ( geom.vertexAt( startVertexIndex ) == geom.vertexAt( endVertexIndex ) ) { // closed curve - we need to find out which way around the curve is shorter double lengthTotal = 0, length0to1 = 0; QgsPoint ptOld = geom.vertexAt( startVertexIndex ); for ( int i = startVertexIndex + 1; i <= endVertexIndex; ++i ) { QgsPoint pt( geom.vertexAt( i ) ); double len = ptOld.distance( pt ); lengthTotal += len; if ( i > vertexId0 && i <= vertexId1 ) length0to1 += len; ptOld = pt; } bool use0to1 = length0to1 < lengthTotal / 2; if ( longWay ) use0to1 = !use0to1; for ( int i = startVertexIndex; i <= endVertexIndex; ++i ) { bool isPickedVertex = i == vertexId0 || i == vertexId1; bool is0to1 = i > vertexId0 && i < vertexId1; if ( isPickedVertex || is0to1 == use0to1 ) 
lst.append( Vertex( layer, fid, i ) ); } } else { // curve that is not closed for ( int i = vertexId0; i <= vertexId1; ++i ) { lst.append( Vertex( layer, fid, i ) ); } } return lst; } void QgsVertexTool::rangeMethodPressEvent( QgsMapMouseEvent *e ) { // nothing to do here for now... Q_UNUSED( e ); } void QgsVertexTool::rangeMethodReleaseEvent( QgsMapMouseEvent *e ) { if ( e->button() == Qt::RightButton ) { stopRangeVertexSelection(); return; } else if ( e->button() == Qt::LeftButton ) { if ( mRangeSelectionFirstVertex ) { // pick final vertex, make selection and switch back to normal selection QgsPointLocator::Match m = snapToEditableLayer( e ); if ( m.hasVertex() ) { if ( m.layer() == mRangeSelectionFirstVertex->layer && m.featureId() == mRangeSelectionFirstVertex->fid ) { QList<Vertex> lst = verticesInRange( m.layer(), m.featureId(), mRangeSelectionFirstVertex->vertexId, m.vertexIndex(), e->modifiers() & Qt::ControlModifier ); setHighlightedVertices( lst ); mSelectionMethod = SelectionNormal; } } } else { // pick first vertex QgsPointLocator::Match m = snapToEditableLayer( e ); if ( m.hasVertex() ) { mRangeSelectionFirstVertex.reset( new Vertex( m.layer(), m.featureId(), m.vertexIndex() ) ); setHighlightedVertices( QList<Vertex>() << *mRangeSelectionFirstVertex ); } } } } void QgsVertexTool::rangeMethodMoveEvent( QgsMapMouseEvent *e ) { if ( e->buttons() ) return; // only with no buttons pressed QgsPointLocator::Match m = snapToEditableLayer( e ); updateFeatureBand( m ); updateVertexBand( m ); if ( !m.hasVertex() ) { QList<Vertex> lst; if ( mRangeSelectionFirstVertex ) lst << *mRangeSelectionFirstVertex; setHighlightedVertices( lst ); return; } if ( mRangeSelectionFirstVertex ) { // pick temporary final vertex and highlight vertices if ( m.layer() == mRangeSelectionFirstVertex->layer && m.featureId() == mRangeSelectionFirstVertex->fid ) { QList<Vertex> lst = verticesInRange( m.layer(), m.featureId(), mRangeSelectionFirstVertex->vertexId, m.vertexIndex(), 
e->modifiers() & Qt::ControlModifier ); setHighlightedVertices( lst ); } } } void QgsVertexTool::startRangeVertexSelection() { mSelectionMethod = SelectionRange; setHighlightedVertices( QList<Vertex>() ); mRangeSelectionFirstVertex.reset(); } void QgsVertexTool::stopRangeVertexSelection() { mSelectionMethod = SelectionNormal; setHighlightedVertices( QList<Vertex>() ); } void QgsVertexTool::cleanEditor( QgsFeatureId id ) { if ( mSelectedFeature.get() && mSelectedFeature.get()->featureId() == id ) { cleanupVertexEditor(); }; }<|fim▁end|>
QgsVertexId vid;
<|file_name|>registry.go<|end_file_name|><|fim▁begin|>package proxy import ( "fmt" "github.com/hashicorp/serf/command" "github.com/mitchellh/cli" "math/rand" "strings" ) // Service contains all the addresses for a current service. // The Service current supports random retrieval of available services. type Service struct { // name of service name string // ip and port that host service Addresses []string } // NewService creates a Service instance func NewService(name string) *Service { return &Service{name: name} } // addAddress adds a provided node to the Service func (s *Service) addAddress(addr string) error { s.Addresses = append(s.Addresses, addr) return nil<|fim▁hole|>// getAddress returns a random node that implements the Service func (s *Service) getAddress() (address string, err error) { err = nil address = "" if len(s.Addresses) > 0 { address = s.Addresses[rand.Intn(len(s.Addresses))] } else { err = fmt.Errorf("Address does not exist for service: %s", s.name) } return address, err } // MembersWrite handles serf output (implements Writer interface) type MembersWrite struct { bytes []byte } // Write implements the Writer interface and handles serf output func (w *MembersWrite) Write(p []byte) (n int, err error) { n = len(p) w.bytes = append(w.bytes, p...) 
err = nil return n, err } // GetString retrieves the string in MembersWrite func (w *MembersWrite) GetString() string { return string(w.bytes[:]) } // Registry contains the mapping of service names to Service objects type Registry struct { services map[string]*Service } // UpdateRegistry computes attached services from serf network func (r *Registry) UpdateRegistry() error { // retrieve members that are alive writer := new(MembersWrite) ui := &cli.BasicUi{Writer: writer} mc := &command.MembersCommand{Ui: ui} var dargs []string dargs = append(dargs, "-status=alive") dargs = append(dargs, "-rpc-addr="+rpcAddr) mc.Run(dargs) mem_str := writer.GetString() mems := strings.Split(strings.Trim(mem_str, "\n"), "\n") r.services = make(map[string]*Service) // parse members from serf output for _, member := range mems { fields := strings.Fields(member) serviceport := strings.Split(fields[0], "#") // there should be no hash marks int the name service_name := serviceport[0] if service_name == "proxy" { continue } if len(serviceport) != 2 { fmt.Errorf("service name incorrectly formatted: %s ", serviceport) continue } complete_address_name := serviceport[1] // service_address := strings.Split(complete_address_name, ":") if len(fields) != 3 { fmt.Errorf("incorrect number of fields for service") continue } // TODO: should make sure that the IP address of serf agent // and the encoded service name are the same // address_fields := strings.Split(fields[1], ":") // serf_address := address_fields[0] // if serf_address != service_address[0] { // fmt.Errorf("Service address does not match serf agent address: %s\n", service_name) // continue // } _, ok := r.services[service_name] var service *Service if ok { service = r.services[service_name] } else { service = NewService(service_name) r.services[service_name] = service } // add new node address to service service.addAddress(complete_address_name) } return nil } // GetActiveNodes returns all nodes associates with a Service func (r 
*Registry) GetActiveNodes() []string { var nodes []string unique_nodes := make(map[string]bool) for _, service := range r.services { for _, val := range service.Addresses { addr := strings.Split(val, ":")[0] unique_nodes[addr] = true } } for node, _ := range unique_nodes { nodes = append(nodes, node) } return nodes } // GetServicesSlice returns all services associated with the Registry func (r *Registry) GetServicesSlice() []string { var services []string for key, _ := range r.services { services = append(services, key) } return services } // GetServiceAddrs returns nodes for a given service (if it exists) func (r *Registry) GetServiceAddrs(service string) ([]string, error) { var err error _, ok := r.services[service] var addrs []string if ok { serviceInfo := r.services[service] addrs = serviceInfo.Addresses } else { err = fmt.Errorf("Service not in registry: " + service) } return addrs, err } // GetServiceAddr returns node for a given service (if it exists) func (r *Registry) GetServiceAddr(service string) (string, error) { var err error _, ok := r.services[service] addr := "" if ok { serviceInfo := r.services[service] addr, err = serviceInfo.getAddress() } else { err = fmt.Errorf("Service not in registry: " + service) } return addr, err }<|fim▁end|>
}
<|file_name|>demo_satpy_ndvi_decorate.py<|end_file_name|><|fim▁begin|>#from satpy import Scene from satpy.utils import debug_on debug_on() #from glob import glob #base_dir="/data/COALITION2/database/meteosat/radiance_HRIT/case-studies/2015/07/07/" #import os #os.chdir(base_dir) #filenames = glob("*201507071200*__") #print base_dir #print filenames ##global_scene = Scene(reader="hrit_msg", filenames=filenames, base_dir=base_dir, ppp_config_dir="/opt/users/hau/PyTroll//cfg_offline/") #global_scene = Scene(reader="hrit_msg", filenames=filenames, base_dir=base_dir, ppp_config_dir="/opt/users/hau/PyTroll/packages/satpy/satpy/etc") #from satpy import available_readers #available_readers() # new version of satpy after 0.8 ################################# from satpy import find_files_and_readers, Scene from datetime import datetime import numpy as np show_details=False save_overview=True files_sat = find_files_and_readers(sensor='seviri', start_time=datetime(2015, 7, 7, 12, 0), end_time=datetime(2015, 7, 7, 12, 0), base_dir="/data/COALITION2/database/meteosat/radiance_HRIT/case-studies/2015/07/07/", reader="seviri_l1b_hrit") #print files_sat #files = dict(files_sat.items() + files_nwc.items()) files = dict(files_sat.items()) global_scene = Scene(filenames=files) # not allowed any more: reader="hrit_msg", <|fim▁hole|> #global_scene.load([0.6, 0.8, 10.8]) #global_scene.load(['IR_120', 'IR_134']) if save_overview: global_scene.load(['overview',0.6, 0.8]) else: global_scene.load([0.6,0.8]) #print(global_scene[0.6]) # works only if you load also the 0.6 channel, but not an RGB that contains the 0.6 #!!# print(global_scene['overview']) ### this one does only work in the develop version global_scene.available_dataset_names() global_scene["ndvi"] = (global_scene[0.8] - global_scene[0.6]) / (global_scene[0.8] + global_scene[0.6]) # !!! 
BUG: will not be resampled in global_scene.resample(area) #from satpy import DatasetID #my_channel_id = DatasetID(name='IR_016', calibration='radiance') #global_scene.load([my_channel_id]) #print(scn['IR_016']) #area="eurol" #area="EuropeCanaryS95" area="ccs4" local_scene = global_scene.resample(area) if show_details: help(local_scene) print global_scene.available_composite_ids() print global_scene.available_composite_names() print global_scene.available_dataset_names() print global_scene.available_writers() if save_overview: #local_scene.show('overview') local_scene.save_dataset('overview', './overview_'+area+'.png', overlay={'coast_dir': '/data/OWARNA/hau/maps_pytroll/', 'color': (255, 255, 255), 'resolution': 'i'}) print 'display ./overview_'+area+'.png &' local_scene["ndvi"] = (local_scene[0.8] - local_scene[0.6]) / (local_scene[0.8] + local_scene[0.6]) #local_scene["ndvi"].area = local_scene[0.8].area print "local_scene[\"ndvi\"].min()", local_scene["ndvi"].compute().min() print "local_scene[\"ndvi\"].max()", local_scene["ndvi"].compute().max() lsmask_file="/data/COALITION2/database/LandSeaMask/SEVIRI/LandSeaMask_"+area+".nc" from netCDF4 import Dataset ncfile = Dataset(lsmask_file,'r') # Read variable corresponding to channel name lsmask = ncfile.variables['lsmask'][:,:] # attention [:,:] or [:] is really necessary import dask.array as da #print 'type(local_scene["ndvi"].data)', type(local_scene["ndvi"].data), local_scene["ndvi"].data.compute().shape #print "type(lsmask)", type(lsmask), lsmask.shape, lsmask[:,:,0].shape, #local_scene["ndvi"].data.compute()[lsmask[:,:,0]==0]=np.nan ndvi_numpyarray=local_scene["ndvi"].data.compute() if area=="EuropeCanaryS95": ndvi_numpyarray[lsmask[::-1,:,0]==0]=np.nan else: ndvi_numpyarray[lsmask[:,:,0]==0]=np.nan local_scene["ndvi"].data = da.from_array(ndvi_numpyarray, chunks='auto') #local_scene["ndvi"].data = local_scene["ndvi"].data.where(lsmask!=0) colorized=True if not colorized: #local_scene.save_dataset('ndvi', 
'./ndvi_'+area+'.png') local_scene.save_dataset('ndvi', './ndvi_'+area+'.png', overlay={'coast_dir': '/data/OWARNA/hau/maps_pytroll/', 'color': (255, 255, 255), 'resolution': 'i'}) #print dir(local_scene.save_dataset) else: # https://github.com/pytroll/satpy/issues/459 # from satpy.enhancements import colorize # colorize(img, **kwargs) # 'ylgn' # https://satpy.readthedocs.io/en/latest/writers.html # nice NDVI colourbar here: # https://www.researchgate.net/figure/NDVI-maps-Vegetation-maps-created-by-measuring-the-Normalized-Vegetation-Difference_fig7_323885082 from satpy.composites import BWCompositor from satpy.enhancements import colorize from satpy.writers import to_image compositor = BWCompositor("test", standard_name="ndvi") composite = compositor((local_scene["ndvi"], )) img = to_image(composite) #from trollimage import colormap #dir(colormap) # 'accent', 'blues', 'brbg', 'bugn', 'bupu', 'colorbar', 'colorize', 'dark2', 'diverging_colormaps', 'gnbu', 'greens', # 'greys', 'hcl2rgb', 'np', 'oranges', 'orrd', 'paired', 'palettebar', 'palettize', 'pastel1', 'pastel2', 'piyg', 'prgn', # 'pubu', 'pubugn', 'puor', 'purd', 'purples', 'qualitative_colormaps', 'rainbow', 'rdbu', 'rdgy', 'rdpu', 'rdylbu', 'rdylgn', # 'reds', 'rgb2hcl', 'sequential_colormaps', 'set1', 'set2', 'set3', 'spectral', 'ylgn', 'ylgnbu', 'ylorbr', 'ylorrd' # kwargs = {"palettes": [{"colors": 'ylgn', # "min_value": -0.1, "max_value": 0.9}]} #arr = np.array([[230, 227, 227], [191, 184, 162], [118, 148, 61], [67, 105, 66], [5, 55, 8]]) arr = np.array([ [ 95, 75, 49], [210, 175, 131], [118, 148, 61], [67, 105, 66], [28, 29, 4]]) np.save("/tmp/binary_colormap.npy", arr) kwargs = {"palettes": [{"filename": "/tmp/binary_colormap.npy", "min_value": -0.1, "max_value": 0.8}]} colorize(img, **kwargs) from satpy.writers import add_decorate, add_overlay decorate = { 'decorate': [ {'logo': {'logo_path': '/opt/users/common/logos/meteoSwiss.png', 'height': 60, 'bg': 'white','bg_opacity': 255, 'align': 
{'top_bottom': 'top', 'left_right': 'right'}}}, {'text': {'txt': ' MSG, '+local_scene.start_time.strftime('%Y-%m-%d %H:%MUTC')+', '+ area+', NDVI', 'align': {'top_bottom': 'top', 'left_right': 'left'}, 'font': "/usr/openv/java/jre/lib/fonts/LucidaTypewriterBold.ttf", 'font_size': 19, 'height': 25, 'bg': 'white', 'bg_opacity': 0, 'line': 'white'}} ] } img = add_decorate(img, **decorate) #, fill_value='black' img = add_overlay(img, area, '/data/OWARNA/hau/maps_pytroll/', color='red', width=0.5, resolution='i', level_coast=1, level_borders=1, fill_value=None) #from satpy.writers import compute_writer_results #res1 = scn.save_datasets(filename="/tmp/{name}.png", # writer='simple_image', # compute=False) #res2 = scn.save_datasets(filename="/tmp/{name}.tif", # writer='geotiff', # compute=False) #results = [res1, res2] #compute_writer_results(results) #img.show() img.save('./ndvi_'+area+'.png') print 'display ./ndvi_'+area+'.png &'<|fim▁end|>
print dir(global_scene)
<|file_name|>info_fn_imps.hpp<|end_file_name|><|fim▁begin|>// -*- C++ -*- // Copyright (C) 2005-2015 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the terms // of the GNU General Public License as published by the Free Software // Foundation; either version 3, or (at your option) any later // version. // This library is distributed in the hope that it will be useful, but // WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // General Public License for more details. // Under Section 7 of GPL version 3, you are granted additional // permissions described in the GCC Runtime Library Exception, version // 3.1, as published by the Free Software Foundation. // You should have received a copy of the GNU General Public License and // a copy of the GCC Runtime Library Exception along with this program; // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see // <http://www.gnu.org/licenses/>. // Copyright (C) 2004 Ami Tavory and Vladimir Dreizin, IBM-HRL. // Permission to use, copy, modify, sell, and distribute this software // is hereby granted without fee, provided that the above copyright // notice appears in all copies, and that both that copyright notice // and this permission notice appear in supporting documentation. None // of the above authors, nor IBM Haifa Research Laboratories, make any // representation about the suitability of this software for any // purpose. It is provided "as is" without express or implied // warranty. /** * @file left_child_next_sibling_heap_/info_fn_imps.hpp * Contains an implementation class for left_child_next_sibling_heap_. 
*/ PB_DS_CLASS_T_DEC inline bool PB_DS_CLASS_C_DEC:: empty() const { return (m_size == 0); } PB_DS_CLASS_T_DEC inline typename PB_DS_CLASS_C_DEC::size_type PB_DS_CLASS_C_DEC:: size() const { return (m_size); } PB_DS_CLASS_T_DEC<|fim▁hole|> return (s_node_allocator.max_size()); }<|fim▁end|>
inline typename PB_DS_CLASS_C_DEC::size_type PB_DS_CLASS_C_DEC:: max_size() const {
<|file_name|>Font.js<|end_file_name|><|fim▁begin|>/* ************************************************************************ Copyright: <|fim▁hole|> ************************************************************************ */ qx.Theme.define("qjide.theme.Font", { extend : qx.theme.modern.Font, fonts : { } });<|fim▁end|>
License: Authors:
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import json from django.contrib.auth.models import User from django.core.exceptions import ValidationError from django.urls import reverse from django.db import models from django.utils.html import strip_tags<|fim▁hole|>from six import text_type class Note(models.Model): """ Stores user Notes for the LMS local Notes service. .. pii: Legacy model for an app that edx.org hasn't used since 2013 .. pii_types: other .. pii_retirement: retained """ user = models.ForeignKey(User, db_index=True, on_delete=models.CASCADE) course_id = CourseKeyField(max_length=255, db_index=True) uri = models.CharField(max_length=255, db_index=True) text = models.TextField(default="") quote = models.TextField(default="") range_start = models.CharField(max_length=2048) # xpath string range_start_offset = models.IntegerField() range_end = models.CharField(max_length=2048) # xpath string range_end_offset = models.IntegerField() tags = models.TextField(default="") # comma-separated string created = models.DateTimeField(auto_now_add=True, null=True, db_index=True) updated = models.DateTimeField(auto_now=True, db_index=True) class Meta: app_label = 'notes' def clean(self, json_body): """ Cleans the note object or raises a ValidationError. """ if json_body is None: raise ValidationError('Note must have a body.') body = json.loads(json_body) if not isinstance(body, dict): raise ValidationError('Note body must be a dictionary.') # NOTE: all three of these fields should be considered user input # and may be output back to the user, so we need to sanitize them. # These fields should only contain _plain text_. 
self.uri = strip_tags(body.get('uri', '')) self.text = strip_tags(body.get('text', '')) self.quote = strip_tags(body.get('quote', '')) ranges = body.get('ranges') if ranges is None or len(ranges) != 1: raise ValidationError('Note must contain exactly one range.') self.range_start = ranges[0]['start'] self.range_start_offset = ranges[0]['startOffset'] self.range_end = ranges[0]['end'] self.range_end_offset = ranges[0]['endOffset'] self.tags = "" tags = [strip_tags(tag) for tag in body.get('tags', [])] if len(tags) > 0: self.tags = ",".join(tags) def get_absolute_url(self): """ Returns the absolute url for the note object. """ kwargs = {'course_id': text_type(self.course_id), 'note_id': str(self.pk)} return reverse('notes_api_note', kwargs=kwargs) def as_dict(self): """ Returns the note object as a dictionary. """ return { 'id': self.pk, 'user_id': self.user.pk, 'uri': self.uri, 'text': self.text, 'quote': self.quote, 'ranges': [{ 'start': self.range_start, 'startOffset': self.range_start_offset, 'end': self.range_end, 'endOffset': self.range_end_offset }], 'tags': self.tags.split(","), 'created': str(self.created), 'updated': str(self.updated) }<|fim▁end|>
from opaque_keys.edx.django.models import CourseKeyField
<|file_name|>test_mlresult.py<|end_file_name|><|fim▁begin|>import os import unittest import numpy as np from tfsnippet.examples.utils import MLResults from tfsnippet.utils import TemporaryDirectory def head_of_file(path, n): with open(path, 'rb') as f: return f.read(n) class MLResultTestCase(unittest.TestCase): def test_imwrite(self): with TemporaryDirectory() as tmpdir: results = MLResults(tmpdir) im = np.zeros([32, 32], dtype=np.uint8) im[16:, ...] = 255 results.save_image('test.bmp', im) file_path = os.path.join(tmpdir, 'test.bmp') self.assertTrue(os.path.isfile(file_path)) self.assertEqual(head_of_file(file_path, 2), b'\x42\x4d') results.save_image('test.png', im) file_path = os.path.join(tmpdir, 'test.png') self.assertTrue(os.path.isfile(file_path)) self.assertEqual(head_of_file(file_path, 8), b'\x89\x50\x4e\x47\x0d\x0a\x1a\x0a')<|fim▁hole|> self.assertTrue(os.path.isfile(file_path)) self.assertEqual(head_of_file(file_path, 3), b'\xff\xd8\xff')<|fim▁end|>
results.save_image('test.jpg', im) file_path = os.path.join(tmpdir, 'test.jpg')
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>/// <reference types="node" /> declare module 'sqlite3' { // Type definitions for sqlite3 3.1 // Project: http://github.com/mapbox/node-sqlite3 // Definitions by: Nick Malaguti <https://github.com/nmalaguti> // Sumant Manne <https://github.com/dpyro> // Behind The Math <https://github.com/BehindTheMath> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped import events = require('events') export const OPEN_READONLY: number export const OPEN_READWRITE: number export const OPEN_CREATE: number export const OPEN_SHAREDCACHE: number export const OPEN_PRIVATECACHE: number export const OPEN_URI: number export const cached: { Database( filename: string, callback?: (this: Database, err: Error | null) => void ): Database Database( filename: string, mode?: number, callback?: (this: Database, err: Error | null) => void ): Database } export interface RunResult extends Statement { lastID: number changes: number } export class Statement { bind (callback?: (err: Error | null) => void): this bind (...params: any[]): this reset (callback?: (err: null) => void): this finalize (callback?: (err: Error) => void): Database run (callback?: (err: Error | null) => void): this run ( params: any, callback?: (this: RunResult, err: Error | null) => void ): this run (...params: any[]): this get (callback?: (err: Error | null, row?: any) => void): this get ( params: any, callback?: (this: RunResult, err: Error | null, row?: any) => void ): this get (...params: any[]): this all (callback?: (err: Error | null, rows: any[]) => void): this all ( params: any, callback?: (this: RunResult, err: Error | null, rows: any[]) => void ): this all (...params: any[]): this each ( callback?: (err: Error | null, row: any) => void, complete?: (err: Error | null, count: number) => void ): this each ( params: any, callback?: (this: RunResult, err: Error | null, row: any) => void, complete?: (err: Error | null, count: number) => void ): this each 
(...params: any[]): this } export class Database extends events.EventEmitter { constructor (filename: string, callback?: (err: Error | null) => void) constructor ( filename: string, mode?: number, callback?: (err: Error | null) => void ) close (callback?: (err: Error | null) => void): void run ( sql: string, callback?: (this: RunResult, err: Error | null) => void ): this run (<|fim▁hole|> ): this run (sql: string, ...params: any[]): this get ( sql: string, callback?: (this: Statement, err: Error | null, row: any) => void ): this get ( sql: string, params: any, callback?: (this: Statement, err: Error | null, row: any) => void ): this get (sql: string, ...params: any[]): this all ( sql: string, callback?: (this: Statement, err: Error | null, rows: any[]) => void ): this all ( sql: string, params: any, callback?: (this: Statement, err: Error | null, rows: any[]) => void ): this all (sql: string, ...params: any[]): this each ( sql: string, callback?: (this: Statement, err: Error | null, row: any) => void, complete?: (err: Error | null, count: number) => void ): this each ( sql: string, params: any, callback?: (this: Statement, err: Error | null, row: any) => void, complete?: (err: Error | null, count: number) => void ): this each (sql: string, ...params: any[]): this exec ( sql: string, callback?: (this: Statement, err: Error | null) => void ): this prepare ( sql: string, callback?: (this: Statement, err: Error | null) => void ): Statement prepare ( sql: string, params: any, callback?: (this: Statement, err: Error | null) => void ): Statement prepare (sql: string, ...params: any[]): Statement serialize (callback?: () => void): void parallelize (callback?: () => void): void on (event: 'trace', listener: (sql: string) => void): this on (event: 'profile', listener: (sql: string, time: number) => void): this on (event: 'error', listener: (err: Error) => void): this on (event: 'open' | 'close', listener: () => void): this on (event: string, listener: (...args: any[]) => 
void): this configure (option: 'busyTimeout', value: number): void interrupt (): void loadExtension (path: string, callback?: (err: Error | null) => void): void } export function verbose (): sqlite3 export interface sqlite3 { OPEN_READONLY: number OPEN_READWRITE: number OPEN_CREATE: number OPEN_SHAREDCACHE: number OPEN_PRIVATECACHE: number OPEN_URI: number cached: typeof cached RunResult: RunResult Statement: typeof Statement Database: typeof Database verbose(): this } }<|fim▁end|>
sql: string, params: any, callback?: (this: RunResult, err: Error | null) => void
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian // Licensed under the MIT License <LICENSE.md><|fim▁hole|><|fim▁end|>
fn main() { println!("cargo:rustc-flags=-l els"); }
<|file_name|>sample.js<|end_file_name|><|fim▁begin|>var KeyVault = require('azure-keyvault'); var util = require('util'); var Crypto = require('crypto'); var AuthenticationContext = require('adal-node').AuthenticationContext; var clientId = '<to-be-filled>'; var clientSecret = '<to-be-filled>'; var vaultUri = '<to-be-filled>'; // Authenticator - retrieves the access token var authenticator = function (challenge, callback) { // Create a new authentication context. var context = new AuthenticationContext(challenge.authorization); // Use the context to acquire an authentication token. return context.acquireTokenWithClientCredentials(challenge.resource, clientId, clientSecret, function (err, tokenResponse) { if (err) throw err; // Calculate the value to be set in the request's Authorization header and resume the call. var authorizationValue = tokenResponse.tokenType + ' ' + tokenResponse.accessToken; return callback(null, authorizationValue); }); }; var credentials = new KeyVault.KeyVaultCredentials(authenticator); var client = new KeyVault.KeyVaultClient(credentials); var attributes = { expires: new Date('2050-02-02T08:00:00.000Z'), notBefore: new Date('2016-01-01T08:00:00.000Z') }; var keyOperations = ['encrypt', 'decrypt', 'sign', 'verify', 'wrapKey', 'unwrapKey']; //Create a key client.createKey(vaultUri, 'mykey', 'RSA', { keyOps: keyOperations, keyAttributes: attributes }, function(err, keyBundle) { if (err) throw err; console.log('\n\nkey ', keyBundle.key.kid, ' is created.\n', util.inspect(keyBundle, { depth: null })); // Retrieve the key client.getKey(keyBundle.key.kid, function(getErr, getKeyBundle) { if (getErr) throw getErr; console.log('\n\nkey ', getKeyBundle.key.kid, ' is retrieved.\n'); // Encrypt a plain text var encryptionContent = new Buffer('This message is to be encrypted...'); client.encrypt(keyBundle.key.kid, 'RSA-OAEP', encryptionContent, function (encryptErr, cipherText) { if (encryptErr) throw encryptErr; console.log('\n\nText is encrypted: ', 
cipherText.result); // Decrypt a cipher text client.decrypt(keyBundle.key.kid, 'RSA-OAEP', cipherText.result, function (decryptErr, plainText) { if (decryptErr) throw decryptErr; console.log('\n\nThe encrypted cipher text is decrypted to: ', plainText.result); }); }); // Sign a digest value var hash = Crypto.createHash('sha256'); var digest = hash.update(new Buffer('sign me')).digest(); client.sign(keyBundle.key.kid, 'RS256', digest, function (signErr, signature) { if (signErr) throw signErr; console.log('The signature for digest ', digest, ' is: ', signature.result); // Verify a signature client.verify(keyBundle.key.kid, 'RS256', digest, signature.result, function (verifyErr, verification) { if (verifyErr) throw verifyErr; console.log('The verification', verification.value === true? 'succeeded':'failed'); }); }); });<|fim▁hole|> if (getErr) throw getErr; console.log('\n\nkey ', updatedKeyBundle.key.kid, ' is updated.\n', util.inspect(updatedKeyBundle, { depth: null })); }); // List all versions of the key var parsedId = KeyVault.parseKeyIdentifier(keyBundle.key.kid); client.getKeyVersions(parsedId.vault, parsedId.name, function (getVersionsErr, result) { if (getVersionsErr) throw getVersionsErr; var loop = function (nextLink) { if (nextLink !== null && nextLink !== undefined) { client.getKeyVersionsNext(nextLink, function (err, res) { console.log(res); loop(res.nextLink); }); } }; console.log(result); loop(result.nextLink); }); }); //Create a secret client.setSecret(vaultUri, 'mysecret', 'my password', { contentType: 'test secret', secretAttributes: attributes }, function (err, secretBundle) { if (err) throw err; console.log('\n\nSecret ', secretBundle.id, ' is created.\n', util.inspect(secretBundle, { depth: null })); // Retrieve the secret client.getSecret(secretBundle.id, function (getErr, getSecretBundle) { if (getErr) throw getErr; console.log('\n\nSecret ', getSecretBundle.id, ' is retrieved.\n'); }); // List all secrets var parsedId = 
KeyVault.parseSecretIdentifier(secretBundle.id); client.getSecrets(parsedId.vault, parsedId.name, function (err, result) { if (err) throw err; var loop = function (nextLink) { if (nextLink !== null && nextLink !== undefined) { client.getSecretsNext(nextLink, function (err, res) { console.log(res); loop(res.nextLink); }); } }; console.log(result); loop(result.nextLink); }); }); var certificatePolicy = { keyProperties : { exportable: true, reuseKey : false, keySize : 2048, keyType : 'RSA' }, secretProperties : { contentType : 'application/x-pkcs12' }, issuerParameters : { name : 'Self' }, x509CertificateProperties : { subject : 'CN=*.microsoft.com', subjectAlternativeNames : ['onedrive.microsoft.com', 'xbox.microsoft.com'], validityInMonths : 24 } }; var intervalTime = 5000; //Create a certificate client.createCertificate(vaultUri, 'mycertificate', { certificatePolicy: certificatePolicy }, function (err, certificateOperation) { if (err) throw err; console.log('\n\nCertificate', certificateOperation.id, 'is being created.\n', util.inspect(certificateOperation, { depth: null })); // Poll the certificate status until it is created var interval = setInterval(function getCertStatus() { var parsedId = KeyVault.parseCertificateOperationIdentifier(certificateOperation.id); client.getCertificateOperation(parsedId.vault, parsedId.name, function (err, pendingCertificate) { if (err) throw err; if (pendingCertificate.status.toUpperCase() === 'completed'.toUpperCase()) { clearInterval(interval); console.log('\n\nCertificate', pendingCertificate.target, 'is created.\n', util.inspect(pendingCertificate, { depth: null })); var parsedCertId = KeyVault.parseCertificateIdentifier(pendingCertificate.target); //Delete the created certificate client.deleteCertificate(parsedCertId.vault, parsedCertId.name, function (delErr, deleteResp) { console.log('\n\nCertificate', pendingCertificate.target, 'is deleted.\n'); }); } else if (pendingCertificate.status.toUpperCase() === 
'InProgress'.toUpperCase()) { console.log('\n\nCertificate', certificateOperation.id, 'is being created.\n', util.inspect(pendingCertificate, { depth: null })); } }); }, intervalTime); });<|fim▁end|>
// Update the key with new tags client.updateKey(keyBundle.key.kid, {tags: {'tag1': 'this is tag1', 'tag2': 'this is tag2'}}, function (getErr, updatedKeyBundle) {
<|file_name|>Module.py<|end_file_name|><|fim▁begin|># Copyright 2015 John Reese # Licensed under the MIT license<|fim▁hole|> from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import sys class Module: """ This class provides a generic interface for the preprocessor to pass data to the module and retrieve a list of Transforms to the data. """ priority = 5 """ Priority is defined as a range of integers with 0 being highest priority, and 5 being "normal". """ def __init__(self): self.encoding = sys.getdefaultencoding() def transform(self, data): """ This method should generate a list of Transform objects for each modification to the original data, and return this list when ready. """ return []<|fim▁end|>
<|file_name|>nedb.repository.ts<|end_file_name|><|fim▁begin|>import IModelRepository from '../engine/IModelRepository'; import Model from '../entities/model.entity'; import { IWhereFilter } from "../engine/filter/WhereFilter"; var DataStore = require('NeDb'); /** * This class is a simple implementation of the NeDB data storage. * @see https://github.com/louischatriot/nedb * * The use of operators for querying data is encouraged. * The following operators are defined by NeDB and are widely used: * Operators ($lt, $lte, $gt, $gte, $in, $nin, $ne, $exists, $regex) * The syntax is { field: { $op: value } } where $op is any comparison operator: * * $lt, $lte: less than, less than or equal * $gt, $gte: greater than, greater than or equal * $in: member of. value must be an array of values * $ne, $nin: not equal, not a member of * $exists: checks whether the document posses the property field. value should be true or false * $regex: checks whether a string is matched by the regular expression. Contrary to MongoDB, the use of $options with $regex is not supported, because it doesn't give you more power than regex flags. Basic queries are more readable so only use the $regex operator when you need to use another operator with it. */ export default class ModelRepository implements IModelRepository { private db; constructor() { this.db = new DataStore({ filename: './build/database/nedb/storage.db', autoload: true }); } /** * Return the number of records that match the optional "where" filter. * * @param: modelName string * The database table/record to be queried. * @param: [where] IWhereFilter * Optional where filter, like { key: val, key2: {gt: 'val2'}, ...} */ count(modelName: string, where?: IWhereFilter) { let filter; if (typeof where !== 'undefined') { filter = {}; // count all } else { filter = where; } this.db.count(filter, (err, count) => { // TODO: turn this into promise //callback(err, count); }); }; /** * Create new instance of Model, and save to database. 
* * @param: model Object * data Optional data argument. Can be either a single model instance or an array of instances. */ create(model) { if (typeof model !== 'undefined') { this.db.insert(model, (err, newModel) => { // TODO: turn this into promise //callback(err, newModel); }); } else { let err = new Error('Please provide a valid model!'); // TODO: turn this into promise //callback(err, null); } }; /** * Destroy all model instances that match the optional where specification. * * @param: modelName string * the name of the table/record to be deleted. * @param: [where] IWhereFilter * Optional where filter, like: {key: val, key2: {gt: 'val2'}, ...} */ destroyAll(modelName: string, where?: IWhereFilter) { // if (typeof where !== 'undefined' && where !== {}) { // this.db.remove(where, { multi: true}, (err, numRemoved) => { // // TODO: turn this into promise // //callback(err, numRemoved); // }); // } let err = new Error('Removing all documents not supported!'); // TODO: turn this into promise //callback(err, null); }; /** * Destroy model instance with the specified ID. * * @param: id * The ID value of model instance to delete. * @param: modelName string * the name of the table/record to be deleted. * @param: [where] IWhereFilter * Optional where filter, like: {key: val, key2: {gt: 'val2'}, ...} */ destroyById(id, modelName: string, where?: IWhereFilter) { if (typeof id !== 'undefined') { this.db.remove({ _id: id }, {}, (err, numRemoved) => { // TODO: turn this into promise //callback(err, numRemoved); }); } let err = new Error('Please provide an id!'); // TODO: turn this into promise //callback(err, null); }; /** * Check whether a model instance exists in database. * * @param: id * Identifier of object (primary key value). * @param: modelName string * the name of the table/record to be deleted. */ exists(id, modelName: string) {}; /** * Find all model instances that match filter specification. * * @param: modelName string * the name of the table/record to be deleted. 
* @param: [where] IWhereFilter * Model instances matching the filter, or null if none found. */ find(modelName: string, where?: IWhereFilter) { let filter; // if (typeof where == 'undefined' || where === {}) { // filter = {}; // find all // } else { // filter = where; // } this.db.find(filter, (err, models) => { // TODO: turn this into promise //callback(err, models); }); }; /** * Find object by ID with an optional filter for include/fields. * * @param: id * Primary key value * @param: [where] IWhereFilter * Optional Filter JSON object */ findById(id, where?: IWhereFilter) { if (typeof id !== 'undefined') { let merge = { _id: id }; if (typeof where !== 'undefined') { // merge the two objects and pass them to db for (var key in where) { if (where.hasOwnProperty(key)) { merge[key] = where[key]; } } } this.db.findOne(merge, (err, model) => { // TODO: turn this into promise //callback(err, model); }); } else { let err = new Error('Please provide an id!'); // TODO: turn this into promise //callback(err, null); } }; /** * Update single model instance that match the where clause. * * @param: id * Primary key value * @param: [where] IWhereFilter * Optional where filter, like {} */ updateById(id, where?: IWhereFilter) {}; /** * Update multiple instances that match the where clause. * * @param: models Object * Object containing data to replace matching instances, if any.<|fim▁hole|> updateAll(models, where?: IWhereFilter) { this.db.update({}, {}, { multi: true }, (err, numReplaced) => { }); }; }<|fim▁end|>
* @param: [where] IWhereFilter * Optional where filter, like { key: val, key2: {gt: 'val2'}, ...} * see Where filter. */
<|file_name|>JobContext.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0<|fim▁hole|> * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.eagle.jobrunning.crawler; public class JobContext { public String jobId; public String user; public Long fetchedTime; public JobContext() { } public JobContext(JobContext context) { this.jobId = new String(context.jobId); this.user = new String(context.user); this.fetchedTime = new Long(context.fetchedTime); } public JobContext(String jobId, String user, Long fetchedTime) { this.jobId = jobId; this.user = user; this.fetchedTime = fetchedTime; } @Override public int hashCode() { return jobId.hashCode() ; } @Override public boolean equals(Object obj) { if (obj instanceof JobContext) { JobContext context = (JobContext)obj; if (this.jobId.equals(context.jobId)) { return true; } } return false; } }<|fim▁end|>
* (the "License"); you may not use this file except in compliance with
<|file_name|>general.js<|end_file_name|><|fim▁begin|>// Copyright 2014 The Oppia Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Minor general functional components for end-to-end testing * with protractor. */ var editor = require('./editor.js'); // Time (in ms) to wait when the system needs time for some computations. var WAIT_TIME = 4000; // Optionally accepts a waitTime integer in milliseconds. var waitForSystem = function() { var waitTime; if (arguments.length === 1) { waitTime = arguments[0]; } else { waitTime = WAIT_TIME; } browser.sleep(waitTime); }; var scrollToTop = function() { browser.executeScript('window.scrollTo(0,0);'); }; // We will report all console logs of level greater than this. 
var CONSOLE_LOG_THRESHOLD = 900; var CONSOLE_ERRORS_TO_IGNORE = []; var checkForConsoleErrors = function(errorsToIgnore) { var irrelevantErrors = errorsToIgnore.concat(CONSOLE_ERRORS_TO_IGNORE); browser.manage().logs().get('browser').then(function(browserLogs) { var fatalErrors = []; for (var i = 0; i < browserLogs.length; i++) { if (browserLogs[i].level.value > CONSOLE_LOG_THRESHOLD) { var errorFatal = true; for (var j = 0; j < irrelevantErrors.length; j++) { if (browserLogs[i].message.match(irrelevantErrors[j])) { errorFatal = false; } } if (errorFatal) { fatalErrors.push(browserLogs[i]); } } } expect(fatalErrors).toEqual([]); }); }; var SERVER_URL_PREFIX = 'http://localhost:9001'; var LIBRARY_URL_SUFFIX = '/library'; var EDITOR_URL_SLICE = '/create/'; var PLAYER_URL_SLICE = '/explore/'; var LOGIN_URL_SUFFIX = '/_ah/login'; var ADMIN_URL_SUFFIX = '/admin'; var MODERATOR_URL_SUFFIX = '/moderator'; var DONATION_THANK_URL_SUFFIX = '/thanks'; // Note that this only works in dev, due to the use of cache slugs in prod. var SCRIPTS_URL_SLICE = '/assets/scripts/'; var EXPLORATION_ID_LENGTH = 12; var FIRST_STATE_DEFAULT_NAME = 'Introduction'; var _getExplorationId = function(currentUrlPrefix) { return { then: function(callbackFunction) { browser.getCurrentUrl().then(function(url) { expect(url.slice(0, currentUrlPrefix.length)).toBe(currentUrlPrefix); var explorationId = url.slice( currentUrlPrefix.length, currentUrlPrefix.length + EXPLORATION_ID_LENGTH); return callbackFunction(explorationId); }); } }; }; // If we are currently in the editor, this will return a promise with the // exploration ID. var getExplorationIdFromEditor = function() { return _getExplorationId(SERVER_URL_PREFIX + EDITOR_URL_SLICE); }; // Likewise for the player var getExplorationIdFromPlayer = function() { return _getExplorationId(SERVER_URL_PREFIX + PLAYER_URL_SLICE); }; // The explorationId here should be a string, not a promise. 
var openEditor = function(explorationId) { browser.get(EDITOR_URL_SLICE + explorationId); browser.waitForAngular(); editor.exitTutorialIfNecessary(); }; var openPlayer = function(explorationId) { browser.get(PLAYER_URL_SLICE + explorationId); browser.waitForAngular(); }; // Takes the user from an exploration editor to its player. // NOTE: we do not use the preview button because that will open a new window. var moveToPlayer = function() { getExplorationIdFromEditor().then(openPlayer); }; // Takes the user from the exploration player to its editor. var moveToEditor = function() { getExplorationIdFromPlayer().then(openEditor); }; var expect404Error = function() { expect(element(by.css('.protractor-test-error-container')).getText()). toMatch('Error 404'); }; // Checks no untranslated values are shown in the page. var ensurePageHasNoTranslationIds = function() { // The use of the InnerHTML is hacky, but is faster than checking each // individual component that contains text. element(by.css('.oppia-base-container')).getInnerHtml().then( function(promiseValue) { // First remove all the attributes translate and variables that are // not displayed var REGEX_TRANSLATE_ATTR = new RegExp('translate="I18N_', 'g'); var REGEX_NG_VARIABLE = new RegExp('<\\[\'I18N_', 'g'); expect(promiseValue.replace(REGEX_TRANSLATE_ATTR, '') .replace(REGEX_NG_VARIABLE, '')).not.toContain('I18N'); }); }; var acceptAlert = function() { browser.wait(function() { return browser.switchTo().alert().accept().then( function() { return true; }, function() { return false; } ); }); }; exports.acceptAlert = acceptAlert; exports.waitForSystem = waitForSystem; exports.scrollToTop = scrollToTop;<|fim▁hole|>exports.SERVER_URL_PREFIX = SERVER_URL_PREFIX; exports.LIBRARY_URL_SUFFIX = LIBRARY_URL_SUFFIX; exports.EDITOR_URL_SLICE = EDITOR_URL_SLICE; exports.LOGIN_URL_SUFFIX = LOGIN_URL_SUFFIX; exports.MODERATOR_URL_SUFFIX = MODERATOR_URL_SUFFIX; exports.ADMIN_URL_SUFFIX = ADMIN_URL_SUFFIX; 
exports.DONATION_THANK_URL_SUFFIX = DONATION_THANK_URL_SUFFIX; exports.SCRIPTS_URL_SLICE = SCRIPTS_URL_SLICE; exports.FIRST_STATE_DEFAULT_NAME = FIRST_STATE_DEFAULT_NAME; exports.getExplorationIdFromEditor = getExplorationIdFromEditor; exports.getExplorationIdFromPlayer = getExplorationIdFromPlayer; exports.openEditor = openEditor; exports.openPlayer = openPlayer; exports.moveToPlayer = moveToPlayer; exports.moveToEditor = moveToEditor; exports.expect404Error = expect404Error; exports.ensurePageHasNoTranslationIds = ensurePageHasNoTranslationIds;<|fim▁end|>
exports.checkForConsoleErrors = checkForConsoleErrors;
<|file_name|>bucket.rs<|end_file_name|><|fim▁begin|>// TODO: Remove when the routing table uses the new bucket iterators. #![allow(unused)] use std::iter::Filter; use std::net::{Ipv4Addr, SocketAddrV4, SocketAddr}; use std::slice::Iter; use bip_util::bt::{self, NodeId}; use routing::node::{Node, NodeStatus}; /// Maximum number of nodes that should reside in any bucket. pub const MAX_BUCKET_SIZE: usize = 8; /// Bucket containing Nodes with identical bit prefixes. pub struct Bucket { nodes: [Node; MAX_BUCKET_SIZE], } impl Bucket { /// Create a new Bucket with all Nodes default initialized. pub fn new() -> Bucket { let id = NodeId::from([0u8; bt::NODE_ID_LEN]); let ip = Ipv4Addr::new(127, 0, 0, 1); let addr = SocketAddr::V4(SocketAddrV4::new(ip, 0)); Bucket { nodes: [Node::as_bad(id, addr), Node::as_bad(id, addr), Node::as_bad(id, addr), Node::as_bad(id, addr), Node::as_bad(id, addr), Node::as_bad(id, addr), Node::as_bad(id, addr), Node::as_bad(id, addr)], } } /// Iterator over all good nodes in the bucket. pub fn good_nodes<'a>(&'a self) -> GoodNodes<'a> { GoodNodes::new(&self.nodes) } /// Iterator over all good nodes and questionable nodes in the bucket. pub fn pingable_nodes<'a>(&'a self) -> PingableNodes<'a> { PingableNodes::new(&self.nodes) } /// Iterator over each node within the bucket. /// /// For buckets newly created, the initial bad nodes are included. pub fn iter(&self) -> Iter<Node> { self.nodes.iter() } /// Indicates if the bucket needs to be refreshed. pub fn needs_refresh(&self) -> bool { self.nodes.iter().fold(true, |prev, node| prev && node.status() != NodeStatus::Good) } /// Attempt to add the given Node to the bucket if it is not in a bad state. /// /// Returns false if the Node could not be placed in the bucket because it is full. 
pub fn add_node(&mut self, new_node: Node) -> bool { let new_node_status = new_node.status(); if new_node_status == NodeStatus::Bad { return true; } // See if this node is already in the table, in that case replace it if it // has a higher or equal status to the current node. if let Some(index) = self.nodes.iter().position(|node| *node == new_node) { let other_node_status = self.nodes[index].status(); if new_node_status >= other_node_status { self.nodes[index] = new_node; } return true; } // See if any lower priority nodes are present in the table, we cant do // nodes that have equal status because we have to prefer longer lasting // nodes in the case of a good status which helps with stability. let replace_index = self.nodes.iter().position(|node| node.status() < new_node_status); if let Some(index) = replace_index { self.nodes[index] = new_node; true } else { false } } } // ----------------------------------------------------------------------------// pub struct GoodNodes<'a> { iter: Filter<Iter<'a, Node>, fn(&&Node) -> bool>, } impl<'a> GoodNodes<'a> { fn new(nodes: &'a [Node]) -> GoodNodes<'a> { GoodNodes { iter: nodes.iter().filter(good_nodes_filter) } } } fn good_nodes_filter(node: &&Node) -> bool { node.status() == NodeStatus::Good } impl<'a> Iterator for GoodNodes<'a> { type Item = &'a Node; fn next(&mut self) -> Option<&'a Node> { self.iter.next() } } // ----------------------------------------------------------------------------// pub struct PingableNodes<'a> { iter: Filter<Iter<'a, Node>, fn(&&Node) -> bool>, } impl<'a> PingableNodes<'a> { fn new(nodes: &'a [Node]) -> PingableNodes<'a> { PingableNodes { iter: nodes.iter().filter(pingable_nodes_filter) } } } fn pingable_nodes_filter(node: &&Node) -> bool { // Function is moderately expensive let status = node.status(); status == NodeStatus::Good || status == NodeStatus::Questionable } impl<'a> Iterator for PingableNodes<'a> { type Item = &'a Node; fn next(&mut self) -> Option<&'a Node> { self.iter.next() 
} } // ----------------------------------------------------------------------------// #[cfg(test)] mod tests { use bip_util::sha::{self, ShaHash}; use bip_util::test as bip_test; use routing::bucket::{self, Bucket}; use routing::node::{Node, NodeStatus}; #[test] fn positive_initial_no_nodes() { let bucket = Bucket::new(); assert_eq!(bucket.good_nodes().count(), 0); assert_eq!(bucket.pingable_nodes().count(), 0); } #[test] fn positive_all_questionable_nodes() { let mut bucket = Bucket::new(); let dummy_addr = bip_test::dummy_socket_addr_v4(); let dummy_ids = bip_test::dummy_block_node_ids(super::MAX_BUCKET_SIZE as u8); for index in 0..super::MAX_BUCKET_SIZE { let node = Node::as_questionable(dummy_ids[index], dummy_addr); bucket.add_node(node); } assert_eq!(bucket.good_nodes().count(), 0); assert_eq!(bucket.pingable_nodes().count(), super::MAX_BUCKET_SIZE); } #[test] fn positive_all_good_nodes() { let mut bucket = Bucket::new(); let dummy_addr = bip_test::dummy_socket_addr_v4(); let dummy_ids = bip_test::dummy_block_node_ids(super::MAX_BUCKET_SIZE as u8); for index in 0..super::MAX_BUCKET_SIZE { let node = Node::as_good(dummy_ids[index], dummy_addr); bucket.add_node(node); } assert_eq!(bucket.good_nodes().count(), super::MAX_BUCKET_SIZE); assert_eq!(bucket.pingable_nodes().count(), super::MAX_BUCKET_SIZE); } #[test] fn positive_replace_questionable_node() { let mut bucket = Bucket::new(); let dummy_addr = bip_test::dummy_socket_addr_v4(); let dummy_ids = bip_test::dummy_block_node_ids(super::MAX_BUCKET_SIZE as u8); for index in 0..super::MAX_BUCKET_SIZE { let node = Node::as_questionable(dummy_ids[index], dummy_addr); bucket.add_node(node); } assert_eq!(bucket.good_nodes().count(), 0); assert_eq!(bucket.pingable_nodes().count(), super::MAX_BUCKET_SIZE); let good_node = Node::as_good(dummy_ids[0], dummy_addr); bucket.add_node(good_node.clone()); assert_eq!(bucket.good_nodes().next().unwrap(), &good_node); assert_eq!(bucket.good_nodes().count(), 1); 
assert_eq!(bucket.pingable_nodes().count(), super::MAX_BUCKET_SIZE); }<|fim▁hole|> let dummy_addr = bip_test::dummy_socket_addr_v4(); let dummy_ids = bip_test::dummy_block_node_ids((super::MAX_BUCKET_SIZE as u8) + 1); for index in 0..super::MAX_BUCKET_SIZE { let node = Node::as_good(dummy_ids[index], dummy_addr); bucket.add_node(node); } // All the nodes should be good assert_eq!(bucket.good_nodes().count(), super::MAX_BUCKET_SIZE); // Create a new good node let unused_id = dummy_ids[dummy_ids.len() - 1]; let new_good_node = Node::as_good(unused_id, dummy_addr); // Make sure the node is NOT in the bucket assert!(bucket.good_nodes().find(|node| &&new_good_node == node).is_none()); // Try to add it bucket.add_node(new_good_node.clone()); // Make sure the node is NOT in the bucket assert!(bucket.good_nodes().find(|node| &&new_good_node == node).is_none()); } #[test] fn positive_resist_questionable_node_churn() { let mut bucket = Bucket::new(); let dummy_addr = bip_test::dummy_socket_addr_v4(); let dummy_ids = bip_test::dummy_block_node_ids((super::MAX_BUCKET_SIZE as u8) + 1); for index in 0..super::MAX_BUCKET_SIZE { let node = Node::as_questionable(dummy_ids[index], dummy_addr); bucket.add_node(node); } // All the nodes should be questionable assert_eq!(bucket.pingable_nodes() .filter(|node| node.status() == NodeStatus::Questionable) .count(), super::MAX_BUCKET_SIZE); // Create a new questionable node let unused_id = dummy_ids[dummy_ids.len() - 1]; let new_questionable_node = Node::as_questionable(unused_id, dummy_addr); // Make sure the node is NOT in the bucket assert!(bucket.pingable_nodes().find(|node| &&new_questionable_node == node).is_none()); // Try to add it bucket.add_node(new_questionable_node.clone()); // Make sure the node is NOT in the bucket assert_eq!(bucket.pingable_nodes() .filter(|node| node.status() == NodeStatus::Questionable) .count(), super::MAX_BUCKET_SIZE); } }<|fim▁end|>
#[test] fn positive_resist_good_node_churn() { let mut bucket = Bucket::new();
<|file_name|>auto_seq2seq.py<|end_file_name|><|fim▁begin|># # Copyright 2018 Analytics Zoo Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either exp' # ress or implied. # See the License for the specific language governing permissions and # limitations under the License. # from zoo.orca.automl.model.base_pytorch_model import PytorchModelBuilder from zoo.orca.automl.auto_estimator import AutoEstimator from zoo.chronos.model.Seq2Seq_pytorch import model_creator from .base_automodel import BasePytorchAutomodel class AutoSeq2Seq(BasePytorchAutomodel): def __init__(self, input_feature_num, output_target_num, past_seq_len, future_seq_len, optimizer, loss, metric, lr=0.001, lstm_hidden_dim=128, lstm_layer_num=2, dropout=0.25, teacher_forcing=False, backend="torch", logs_dir="/tmp/auto_seq2seq", cpus_per_trial=1, name="auto_seq2seq",<|fim▁hole|> :param input_feature_num: Int. The number of features in the input :param output_target_num: Int. The number of targets in the output :param past_seq_len: Int. The number of historical steps used for forecasting. :param future_seq_len: Int. The number of future steps to forecast. :param optimizer: String or pyTorch optimizer creator function or tf.keras optimizer instance. :param loss: String or pytorch/tf.keras loss instance or pytorch loss creator function. :param metric: String. The evaluation metric name to optimize. e.g. "mse" :param lr: float or hp sampling function from a float space. Learning rate. e.g. hp.choice([0.001, 0.003, 0.01]) :param lstm_hidden_dim: LSTM hidden channel for decoder and encoder. 
hp.grid_search([32, 64, 128]) :param lstm_layer_num: LSTM layer number for decoder and encoder. e.g. hp.randint(1, 4) :param dropout: float or hp sampling function from a float space. Learning rate. Dropout rate. e.g. hp.uniform(0.1, 0.3) :param teacher_forcing: If use teacher forcing in training. e.g. hp.choice([True, False]) :param backend: The backend of the Seq2Seq model. We only support backend as "torch" for now. :param logs_dir: Local directory to save logs and results. It defaults to "/tmp/auto_seq2seq" :param cpus_per_trial: Int. Number of cpus for each trial. It defaults to 1. :param name: name of the AutoSeq2Seq. It defaults to "auto_seq2seq" :param remote_dir: String. Remote directory to sync training results and checkpoints. It defaults to None and doesn't take effects while running in local. While running in cluster, it defaults to "hdfs:///tmp/{name}". """ super().__init__() # todo: support search for past_seq_len. # todo: add input check. if backend != "torch": raise ValueError(f"We only support backend as torch. Got {backend}") self.search_space = dict( input_feature_num=input_feature_num, output_feature_num=output_target_num, past_seq_len=past_seq_len, future_seq_len=future_seq_len, lstm_hidden_dim=lstm_hidden_dim, lstm_layer_num=lstm_layer_num, lr=lr, dropout=dropout, teacher_forcing=teacher_forcing ) self.metric = metric model_builder = PytorchModelBuilder(model_creator=model_creator, optimizer_creator=optimizer, loss_creator=loss, ) self.auto_est = AutoEstimator(model_builder=model_builder, logs_dir=logs_dir, resources_per_trial={"cpu": cpus_per_trial}, remote_dir=remote_dir, name=name)<|fim▁end|>
remote_dir=None, ): """ Create an AutoSeq2Seq.
<|file_name|>issue-3743.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. struct Vec2 { x: f64,<|fim▁hole|> y: f64 } // methods we want to export as methods as well as operators impl Vec2 { #[inline(always)] fn vmul(self, other: f64) -> Vec2 { Vec2 { x: self.x * other, y: self.y * other } } } // Right-hand-side operator visitor pattern trait RhsOfVec2Mul<Result> { fn mul_vec2_by(&self, lhs: &Vec2) -> Result; } // Vec2's implementation of Mul "from the other side" using the above trait impl<Res, Rhs: RhsOfVec2Mul<Res>> Mul<Rhs,Res> for Vec2 { fn mul(&self, rhs: &Rhs) -> Res { rhs.mul_vec2_by(self) } } // Implementation of 'f64 as right-hand-side of Vec2::Mul' impl RhsOfVec2Mul<Vec2> for f64 { fn mul_vec2_by(&self, lhs: &Vec2) -> Vec2 { lhs.vmul(*self) } } // Usage with failing inference pub fn main() { let a = Vec2 { x: 3.0f64, y: 4.0f64 }; // the following compiles and works properly let v1: Vec2 = a * 3.0f64; println!("{} {}", v1.x, v1.y); // the following compiles but v2 will not be Vec2 yet and // using it later will cause an error that the type of v2 // must be known let v2 = a * 3.0f64; println!("{} {}", v2.x, v2.y); // error regarding v2's type }<|fim▁end|>
<|file_name|>test_stock_cycle_count.py<|end_file_name|><|fim▁begin|># Copyright 2017 ForgeFlow S.L. # (http://www.forgeflow.com) # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html). from datetime import datetime, timedelta from odoo.exceptions import AccessError, ValidationError from odoo.tests import common class TestStockCycleCount(common.TransactionCase): def setUp(self): super(TestStockCycleCount, self).setUp() self.res_users_model = self.env["res.users"] self.cycle_count_model = self.env["stock.cycle.count"] self.stock_cycle_count_rule_model = self.env["stock.cycle.count.rule"] self.inventory_model = self.env["stock.inventory"] self.stock_location_model = self.env["stock.location"] self.stock_move_model = self.env["stock.move"] self.stock_warehouse_model = self.env["stock.warehouse"] self.product_model = self.env["product.product"] self.quant_model = self.env["stock.quant"] self.move_model = self.env["stock.move"] self.company = self.env.ref("base.main_company") self.partner = self.env.ref("base.res_partner_1") self.g_stock_manager = self.env.ref("stock.group_stock_manager") self.g_stock_user = self.env.ref("stock.group_stock_user") # Create users: self.manager = self._create_user( "user_1", [self.g_stock_manager], self.company ).id self.user = self._create_user("user_2", [self.g_stock_user], self.company).id # Create warehouses: self.big_wh = self.stock_warehouse_model.create( {"name": "BIG", "code": "B", "cycle_count_planning_horizon": 30} ) self.small_wh = self.stock_warehouse_model.create( {"name": "SMALL", "code": "S"} ) # Create rules: self.rule_periodic = self._create_stock_cycle_count_rule_periodic( self.manager, "rule_1", [2, 7] ) self.rule_turnover = self._create_stock_cycle_count_rule_turnover( self.manager, "rule_2", [100] ) self.rule_accuracy = self._create_stock_cycle_count_rule_accuracy( self.manager, "rule_3", [5], self.big_wh.view_location_id.ids ) self.zero_rule = self._create_stock_cycle_count_rule_zero( self.manager, 
"rule_4" ) # Configure warehouses: self.rule_ids = [ self.rule_periodic.id, self.rule_turnover.id, self.rule_accuracy.id, self.zero_rule.id, ] self.big_wh.write({"cycle_count_rule_ids": [(6, 0, self.rule_ids)]}) # Create a location:<|fim▁hole|> {"name": "Place", "usage": "production"} ) self.stock_location_model._parent_store_compute() # Create a cycle count: self.cycle_count_1 = self.cycle_count_model.with_user(self.manager).create( { "name": "Test cycle count", "cycle_count_rule_id": self.rule_periodic.id, "location_id": self.count_loc.id, } ) # Create a product: self.product1 = self.product_model.create( {"name": "Test Product 1", "type": "product", "default_code": "PROD1"} ) def _create_user(self, login, groups, company): group_ids = [group.id for group in groups] user = self.res_users_model.create( { "name": login, "login": login, "email": "[email protected]", "company_id": company.id, "company_ids": [(4, company.id)], "groups_id": [(6, 0, group_ids)], } ) return user def _create_stock_cycle_count_rule_periodic(self, uid, name, values): rule = self.stock_cycle_count_rule_model.with_user(uid).create( { "name": name, "rule_type": "periodic", "periodic_qty_per_period": values[0], "periodic_count_period": values[1], } ) return rule def _create_stock_cycle_count_rule_turnover(self, uid, name, values): rule = self.stock_cycle_count_rule_model.with_user(uid).create( { "name": name, "rule_type": "turnover", "turnover_inventory_value_threshold": values[0], } ) return rule def _create_stock_cycle_count_rule_accuracy(self, uid, name, values, zone_ids): rule = self.stock_cycle_count_rule_model.with_user(uid).create( { "name": name, "rule_type": "accuracy", "accuracy_threshold": values[0], "apply_in": "location", "location_ids": [(6, 0, zone_ids)], } ) return rule def _create_stock_cycle_count_rule_zero(self, uid, name): rule = self.stock_cycle_count_rule_model.with_user(uid).create( {"name": name, "rule_type": "zero"} ) return rule def test_cycle_count_planner(self): 
"""Tests creation of cycle counts.""" # Common rules: wh = self.big_wh locs = self.stock_location_model for rule in self.big_wh.cycle_count_rule_ids: locs += wh._search_cycle_count_locations(rule) locs = locs.exists() # remove duplicated locations. counts = self.cycle_count_model.search([("location_id", "in", locs.ids)]) self.assertFalse(counts, "Existing cycle counts before execute planner.") date_pre_existing_cc = datetime.today() + timedelta(days=30) loc = locs.filtered(lambda l: l.usage != "view")[0] pre_existing_count = self.cycle_count_model.create( { "name": "To be cancelled when running cron job.", "cycle_count_rule_id": self.rule_periodic.id, "location_id": loc.id, "date_deadline": date_pre_existing_cc, } ) self.assertEqual( pre_existing_count.state, "draft", "Testing data not generated properly." ) date = datetime.today() - timedelta(days=1) self.inventory_model.create( { "name": "Pre-existing inventory", "location_ids": [(4, loc.id)], "date": date, } ) self.quant_model.create( { "product_id": self.product1.id, "location_id": self.count_loc.id, "quantity": 1.0, } ) move1 = self.stock_move_model.create( { "name": "Pre-existing move", "product_id": self.product1.id, "product_uom_qty": 1.0, "product_uom": self.product1.uom_id.id, "location_id": self.count_loc.id, "location_dest_id": loc.id, } ) move1._action_confirm() move1._action_assign() move1.move_line_ids[0].qty_done = 1.0 move1._action_done() wh.cron_cycle_count() self.assertNotEqual( pre_existing_count.date_deadline, date_pre_existing_cc, "Date of pre-existing cycle counts has not been " "updated.", ) counts = self.cycle_count_model.search([("location_id", "in", locs.ids)]) self.assertTrue(counts, "Cycle counts not planned") # Zero-confirmations: count = self.cycle_count_model.search( [ ("location_id", "=", loc.id), ("cycle_count_rule_id", "=", self.zero_rule.id), ] ) self.assertFalse(count, "Unexpected zero confirmation.") move2 = self.move_model.create( { "name": "make the locations to run out of 
stock.", "product_id": self.product1.id, "product_uom_qty": 1.0, "product_uom": self.product1.uom_id.id, "location_id": loc.id, "location_dest_id": self.count_loc.id, } ) move2._action_confirm() move2._action_assign() move2.move_line_ids[0].qty_done = 1.0 move2._action_done() count = self.cycle_count_model.search( [ ("location_id", "=", loc.id), ("cycle_count_rule_id", "=", self.zero_rule.id), ] ) self.assertTrue(count, "Zero confirmation not being created.") def test_cycle_count_workflow(self): """Tests workflow.""" self.cycle_count_1.action_create_inventory_adjustment() inventory = self.inventory_model.search( [("cycle_count_id", "=", self.cycle_count_1.id)] ) self.assertTrue(inventory, "Inventory not created.") inventory.action_start() inventory.action_validate() self.assertEqual( self.cycle_count_1.state, "done", "Cycle count not set as done." ) self.cycle_count_1.do_cancel() self.assertEqual( self.cycle_count_1.state, "cancelled", "Cycle count not set as cancelled." ) def test_view_methods(self): """Tests the methods used to handle views.""" self.cycle_count_1.action_create_inventory_adjustment() self.cycle_count_1.sudo().action_view_inventory() inv_count = self.cycle_count_1.inventory_adj_count self.assertEqual(inv_count, 1, "View method failing.") rules = [ self.rule_periodic, self.rule_turnover, self.rule_accuracy, self.zero_rule, ] for r in rules: r._compute_rule_description() self.assertTrue(r.rule_description, "No description provided") self.rule_accuracy._onchange_locaton_ids() self.assertEqual( self.rule_accuracy.warehouse_ids.ids, self.big_wh.ids, "Rules defined for zones are not getting the right " "warehouse.", ) def test_user_security(self): """Tests user rights.""" with self.assertRaises(AccessError): self._create_stock_cycle_count_rule_periodic(self.user, "rule_1b", [2, 7]) with self.assertRaises(AccessError): self.cycle_count_1.with_user(self.user).unlink() def test_rule_periodic_constrains(self): """Tests the constrains for the periodic 
rules.""" # constrain: periodic_qty_per_period < 1 with self.assertRaises(ValidationError): self._create_stock_cycle_count_rule_periodic(self.manager, "rule_0", [0, 0]) # constrain: periodic_count_period < 0 with self.assertRaises(ValidationError): self._create_stock_cycle_count_rule_periodic( self.manager, "rule_0", [1, -1] ) def test_rule_zero_constrains(self): """Tests the constrains for the zero-confirmation rule: it might only exist one zero confirmation rule per warehouse and have just one warehouse assigned. """ zero2 = self._create_stock_cycle_count_rule_zero(self.manager, "zero_rule_2") with self.assertRaises(ValidationError): zero2.warehouse_ids = [(4, self.big_wh.id)] with self.assertRaises(ValidationError): self.zero_rule.warehouse_ids = [(4, self.small_wh.id)] def test_auto_link_inventory_to_cycle_count_1(self): """Create an inventory that could fit a planned cycle count should auto-link it to that cycle count.""" self.assertEqual(self.cycle_count_1.state, "draft") inventory = self.inventory_model.create( { "name": "new inventory", "location_ids": [(4, self.count_loc.id)], "exclude_sublocation": True, } ) self.assertEqual(inventory.cycle_count_id, self.cycle_count_1) self.assertEqual(self.cycle_count_1.state, "open") def test_auto_link_inventory_to_cycle_count_2(self): """Test auto-link when exclude sublocation is no set.""" self.assertEqual(self.cycle_count_1.state, "draft") inventory = self.inventory_model.create( {"name": "new inventory", "location_ids": [(4, self.count_loc.id)]} ) self.assertEqual(inventory.cycle_count_id, self.cycle_count_1) self.assertEqual(self.cycle_count_1.state, "open")<|fim▁end|>
self.count_loc = self.stock_location_model.create(
<|file_name|>city.cpp<|end_file_name|><|fim▁begin|>/* * File: city.cpp * Author: matthew * * Created on 24 October 2013, 20:28 */ #include <gtk/gtk.h> #include <iostream> #include <stdlib.h> /* abs */<|fim▁hole|>#include "city.h" #include "unit_ref.h" #include "includes.h" using namespace std; cCity::cCity(gint iCityPlayerId_, gint iCellX_, gint iCellY_, gint iCityIslandListId_) { //if (debugCityAdd) println(" cCity constructor for intCityPlayerId_=" + intCityPlayerId_+", iCityIslandListId_="+iCityIslandListId_); iCityPlayerId=iCityPlayerId_; setCityIslandListId(iCityIslandListId_); iCellX=iCellX_; iCellY=iCellY_; iStrength=1; //int(random(1,3)); //if( intCityPlayerId_==1 ) { //println("cCity constructor for intCityPlayerId_=" + intCityPlayerId_ +" strength="+strength); //} if( iCityPlayerId_ != -1 ) { //count << "is human or computer city so build a tank"; // game rule: default initial production to Tank iProductionUnitTypeId= 0; //iProductionDaysLeft = cUnitRef::getDaysToProduce(0); iProductionDaysLeft = 4; //sProductionUnitTypeName = cUnitRef::getUnitName(0); //println("productionUnitTypeId=" + productionUnitTypeId + ", productionDaysLeft=" + productionDaysLeft); //oIslandList.updateIslandPlayerCityCount(getCityIslandListId(), -1, intCityPlayerId_); } else { //println("city is unoccupied so build nothing"); // game rule: empty city does not produce anything iProductionUnitTypeId= -1; iProductionDaysLeft = -1; //sProductionUnitTypeName = "N/A"; //println("productionUnitTypeId=" + productionUnitTypeId + ", productionDaysLeft=" + productionDaysLeft); //oIslandList.increaseUnoccupiedCityCount( getCityIslandListId() ); } } //GString cCity::getLocation() { return nf(iCellX,3)+","+nf(iCellY,3); } gint cCity::getCellX() { return iCellX; } gint cCity::getCellY() { return iCellY; } gint cCity::getCityPlayerId() { return iCityPlayerId; } gint cCity::setCityPlayerId(gint iCityPlayerId_) { iCityPlayerId=iCityPlayerId_; } gint cCity::setCityIslandListId(gint value_) { 
iCityIslandListId=value_; } gint cCity::getCityIslandListId() { return iCityIslandListId; } void cCity::printRowCol() { cout << "city at row=" << iCellX << ", col=" << iCellY; } gchar* cCity::getStatus() { gchar* sStatus="Unoccupied"; switch( getCityPlayerId() ) { case 1: sStatus="player 1"; break; case 2: sStatus="player 2"; break; } return sStatus; } gboolean cCity::isNearby(gint iCellX_, gint iCellY_) { if ( abs(iCellX_ - iCellX)<=2 && abs(iCellY_ - iCellY)<=2 ) return true; else return false; } gboolean cCity::isAt(gint cellRow_, gint cellCol_) { if( iCellX==cellRow_ && iCellY==cellCol_ ) return true; else return false; } gboolean cCity::isOccupied() { if( iCityPlayerId==-1 ) return false; else return true; }<|fim▁end|>
<|file_name|>global.rs<|end_file_name|><|fim▁begin|>//! The global state. use parking_lot::Mutex; use std::collections::HashSet; use std::{mem, panic}; use {rand, hazard, mpsc, debug, settings}; use garbage::Garbage; lazy_static! { /// The global state. /// /// This state is shared between all the threads. static ref STATE: State = State::new(); } /// Create a new hazard. /// /// This creates a new hazard and registers it in the global state. It's secondary, writer part is /// returned. pub fn create_hazard() -> hazard::Writer { STATE.create_hazard() } /// Export garbage into the global state. /// /// This adds the garbage, which will eventually be destroyed, to the global state. Note that this /// does not tick, and thus cannot cause garbage collection. pub fn export_garbage(garbage: Vec<Garbage>) { STATE.export_garbage(garbage) } /// Attempt to garbage collect. /// /// If another garbage collection is currently running, the thread will do nothing, and `Err(())` /// will be returned. Otherwise, it returns `Ok(())`. /// /// # Panic /// /// If a destructor panics, this will panic as well. pub fn try_gc() -> Result<(), ()> { STATE.try_gc() } /// Tick the clock. /// /// This shall be called when new garbage is added, as it will trigger a GC by some probability. pub fn tick() { // Generate a random number and compare it against the probability. if rand::random::<usize>() < settings::get().gc_probability { // The outfall was to (attempt at) GC. let _ = try_gc(); } } /// A message to the global state. enum Message { /// Add new garbage. Garbage(Vec<Garbage>), /// Add a new hazard. NewHazard(hazard::Reader), } /// The global state. /// /// The global state is shared between all threads and keeps track of the garbage and the active /// hazards. /// /// It is divided into two parts: The channel and the garbo. The channel buffers messages, which /// will eventually be executed at garbo, which holds all the data structures and is protected by a /// mutex. 
The garbo holds the other end to the channel. struct State { /// The message-passing channel. chan: mpsc::Sender<Message>, /// The garbo part of the state. garbo: Mutex<Garbo>, } impl State { /// Initialize a new state. fn new() -> State { // Create the message-passing channel. let (send, recv) = mpsc::channel(); // Construct the state from the two halfs of the channel. State { chan: send, garbo: Mutex::new(Garbo { chan: recv, garbage: Vec::new(), hazards: Vec::new(), }) } } /// Create a new hazard. /// /// This creates a new hazard and registers it in the global state. It's secondary, writer part /// is returned. fn create_hazard(&self) -> hazard::Writer { // Create the hazard. let (writer, reader) = hazard::create(); // Communicate the new hazard to the global state through the channel. self.chan.send(Message::NewHazard(reader)); // Return the other half of the hazard. writer } /// Export garbage into the global state. /// /// This adds the garbage, which will eventually be destroyed, to the global state. fn export_garbage(&self, garbage: Vec<Garbage>) { // Send the garbage to the message-passing channel of the state. self.chan.send(Message::Garbage(garbage)); } /// Try to collect the garbage. /// /// This will handle all of the messages in the channel and then attempt at collect the /// garbage. If another thread is currently collecting garbage, `Err(())` is returned, /// otherwise it returns `Ok(())`. /// /// Garbage collection works by scanning the hazards and dropping all the garbage which is not<|fim▁hole|> // Collect the garbage. garbo.gc(); Ok(()) } else { // Another thread is collecting. Err(()) } } } impl panic::RefUnwindSafe for State {} /// The garbo part of the state. /// /// This part is supposed to act like the garbage collecting part. It handles hazards, garbage, and /// the receiving point of the message-passing channel. struct Garbo { /// The channel of messages. chan: mpsc::Receiver<Message>, /// The to-be-destroyed garbage. 
garbage: Vec<Garbage>, /// The current hazards. hazards: Vec<hazard::Reader>, } impl Garbo { /// Handle a given message. /// /// "Handle" in this case refers to applying the operation defined by the message to the state, /// effectually executing the instruction of the message. fn handle(&mut self, msg: Message) { match msg { // Append the garbage bulk to the garbage list. Message::Garbage(mut garbage) => self.garbage.append(&mut garbage), // Register the new hazard into the state. Message::NewHazard(hazard) => self.hazards.push(hazard), } } /// Handle all the messages and garbage collect all unused garbage. /// /// # Panic /// /// If a destructor panics, this will panic as well. fn gc(&mut self) { // Print message in debug mode. debug::exec(|| println!("Collecting garbage.")); // Handle all the messages sent. for msg in self.chan.recv_all() { self.handle(msg); } // Create the set which will keep the _active_ hazards. let mut active = HashSet::with_capacity(self.hazards.len()); // Take out the hazards and go over them one-by-one. let len = self.hazards.len(); // TODO: This should be substituted into next line. for hazard in mem::replace(&mut self.hazards, Vec::with_capacity(len)) { match hazard.get() { // The hazard is dead, so the other end (the writer) is not available anymore, // hence we can safely destroy it. hazard::State::Dead => unsafe { hazard.destroy() }, // The hazard is free and must thus be put back to the hazard list. hazard::State::Free => self.hazards.push(hazard), hazard::State::Protect(ptr) => { // This hazard is active, hence we insert the pointer it contains in our // "active" set. active.insert(ptr); // Since the hazard is still alive, we must put it back to the hazard list for // future use. self.hazards.push(hazard); }, } } // Scan the garbage for unused objects. self.garbage.retain(|garbage| active.contains(&garbage.ptr())) } } impl Drop for Garbo { fn drop(&mut self) { // Do a final GC. 
self.gc(); } } #[cfg(test)] mod tests { use super::*; use garbage::Garbage; use std::{panic, ptr}; #[test] fn dtor_runs() { fn dtor(x: *const u8) { unsafe { *(x as *mut u8) = 1; } } let s = State::new(); for _ in 0..1000 { let b = Box::new(0); let h = s.create_hazard(); h.protect(&*b); s.export_garbage(vec![Garbage::new(&*b, dtor)]); while s.try_gc().is_err() {} assert_eq!(*b, 0); while s.try_gc().is_err() {} h.free(); while s.try_gc().is_err() {} assert_eq!(*b, 1); h.kill(); } } #[test] fn clean_up_state() { fn dtor(x: *const u8) { unsafe { *(x as *mut u8) = 1; } } for _ in 0..1000 { let b = Box::new(0); { let s = State::new(); s.export_garbage(vec![Garbage::new(&*b, dtor)]); } assert_eq!(*b, 1); } } #[test] fn panic_invalidate_state() { fn panic(_: *const u8) { panic!(); } fn dtor(x: *const u8) { unsafe { *(x as *mut u8) = 1; } } let s = State::new(); let b = Box::new(0); let h = create_hazard(); h.protect(&*b); s.export_garbage(vec![Garbage::new(&*b, dtor), Garbage::new(0x2 as *const u8, panic)]); let _ = panic::catch_unwind(|| { while s.try_gc().is_err() {} }); assert_eq!(*b, 0); h.free(); while s.try_gc().is_err() {} assert_eq!(*b, 1); } #[test] #[should_panic] fn panic_in_dtor() { fn dtor(_: *const u8) { panic!(); } let s = State::new(); s.export_garbage(vec![Garbage::new(ptr::null(), dtor)]); while s.try_gc().is_err() {} } #[cfg(debug_assertions)] #[test] #[should_panic] fn debug_more_hazards() { let s = State::new(); let h = s.create_hazard(); h.free(); mem::forget(h); } }<|fim▁end|>
/// currently active in the hazards. fn try_gc(&self) -> Result<(), ()> { // Lock the "garbo" (the part of the state needed to GC). if let Some(mut garbo) = self.garbo.try_lock() {
<|file_name|>WellTile.js<|end_file_name|><|fim▁begin|>"use strict"; class WellTile extends Tile { constructor(row, column) { super("brunnen.gif", row, column); this.tooltip_name = "WELL"; this.build_costs = { humansidle: 5 }; this.production = { water: 1 }; this.counter = 1; this.workingspeed = 5;<|fim▁hole|> clone() { return new WellTile(this.row, this.column); } getUpgrades() { return [new GrassTile(),] } };<|fim▁end|>
}
<|file_name|>1.cc<|end_file_name|><|fim▁begin|>// 20020717 gdr // Copyright (C) 2002-2021 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the // terms of the GNU General Public License as published by the // Free Software Foundation; either version 3, or (at your option)<|fim▁hole|>// but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License along // with this library; see the file COPYING3. If not see // <http://www.gnu.org/licenses/>. // Test slice class invariants #include <valarray> #include <cstdlib> #include <testsuite_hooks.h> bool construction(std::size_t start, std::size_t size, std::size_t stride) { std::slice s(start, size, stride); return s.start() == start && s.size() == size && s.stride() == stride; } bool copy(std::size_t start, std::size_t size, std::size_t stride) { std::slice s(start, size, stride); std::slice t = s; return t.start() == start && t.size() == size && t.stride() == stride; } bool assignment(std::size_t start, std::size_t size, std::size_t stride) { std::slice s(start, size, stride); std::slice t; t = s; return t.start() == start && t.size() == size && t.stride() == stride; } int main() { std::srand(20020717); using std::rand; VERIFY(construction(rand(), rand(), rand())); VERIFY(copy(rand(), rand(), rand())); VERIFY(assignment(rand(), rand(), rand())); return 0; }<|fim▁end|>
// any later version. // This library is distributed in the hope that it will be useful,
<|file_name|>espressopp_polymer_melt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: iso-8859-1 -*- ########################################################################### # # # ESPResSo++ Benchmark Python script for a polymer melt # # # ########################################################################### import sys import time import espresso import mpi4py.MPI as MPI import logging from espresso import Real3D, Int3D from espresso.tools import lammps, gromacs from espresso.tools import decomp, timers, replicate # simulation parameters (nvt = False is nve) steps = 1000 rc = 1.12 skin = 0.3 nvt = True timestep = 0.01 ###################################################################### ### IT SHOULD BE UNNECESSARY TO MAKE MODIFICATIONS BELOW THIS LINE ### ###################################################################### sys.stdout.write('Setting up simulation ...\n') bonds, angles, x, y, z, Lx, Ly, Lz = lammps.read('espressopp_polymer_melt.start') bonds, angles, x, y, z, Lx, Ly, Lz = replicate(bonds, angles, x, y, z, Lx, Ly, Lz, xdim=1, ydim=1, zdim=1) num_particles = len(x) density = num_particles / (Lx * Ly * Lz) size = (Lx, Ly, Lz) system = espresso.System() system.rng = espresso.esutil.RNG() system.bc = espresso.bc.OrthorhombicBC(system.rng, size) system.skin = skin comm = MPI.COMM_WORLD nodeGrid = espresso.tools.decomp.nodeGrid(comm.size) cellGrid = espresso.tools.decomp.cellGrid(size,nodeGrid,rc,skin) system.storage = espresso.storage.DomainDecomposition(system, nodeGrid, cellGrid) # add particles to the system and then decompose # do this in chunks of 1000 particles to speed it up props = ['id', 'type', 'mass', 'pos'] new_particles = [] for i in range(num_particles): part = [i + 1, 0, 1.0, Real3D(x[i], y[i], z[i])] new_particles.append(part) if i % 1000 == 0: system.storage.addParticles(new_particles, *props) system.storage.decompose() new_particles = []<|fim▁hole|>system.storage.decompose() # Lennard-Jones with Verlet list 
vl = espresso.VerletList(system, cutoff = rc + system.skin) potLJ = espresso.interaction.LennardJones(1.0, 1.0, cutoff = rc, shift = False) interLJ = espresso.interaction.VerletListLennardJones(vl) interLJ.setPotential(type1 = 0, type2 = 0, potential = potLJ) system.addInteraction(interLJ) # FENE bonds fpl = espresso.FixedPairList(system.storage) fpl.addBonds(bonds) potFENE = espresso.interaction.FENE(K=30.0, r0=0.0, rMax=1.5) interFENE = espresso.interaction.FixedPairListFENE(system, fpl, potFENE) system.addInteraction(interFENE) # Cosine with FixedTriple list ftl = espresso.FixedTripleList(system.storage) ftl.addTriples(angles) potCosine = espresso.interaction.Cosine(K=1.5, theta0=3.1415926) interCosine = espresso.interaction.FixedTripleListCosine(system, ftl, potCosine) #interCosine.setPotential(type1 = 0, type2 = 0, potential = potCosine) system.addInteraction(interCosine) # integrator integrator = espresso.integrator.VelocityVerlet(system) integrator.dt = timestep if(nvt): langevin = espresso.integrator.LangevinThermostat(system) langevin.gamma = 1.0 langevin.temperature = 1.0 integrator.addExtension(langevin) # print simulation parameters print '' print 'number of particles =', num_particles print 'density = %.4f' % (density) print 'rc =', rc print 'dt =', integrator.dt print 'skin =', system.skin print 'nvt =', nvt print 'steps =', steps print 'NodeGrid = %s' % (nodeGrid) print 'CellGrid = %s' % (cellGrid) print '' # analysis # configurations = espresso.analysis.Configurations(system) # configurations.gather() temperature = espresso.analysis.Temperature(system) pressure = espresso.analysis.Pressure(system) pressureTensor = espresso.analysis.PressureTensor(system) fmt = '%5d %8.4f %10.5f %8.5f %12.3f %12.3f %12.3f %12.3f %12.3f\n' T = temperature.compute() P = pressure.compute() Pij = pressureTensor.compute() Ek = 0.5 * T * (3 * num_particles) Ep = interLJ.computeEnergy() Eb = interFENE.computeEnergy() Ea = interCosine.computeEnergy() Etotal = Ek + Ep + Eb + 
Ea sys.stdout.write(' step T P Pxy etotal ekinetic epair ebond eangle\n') sys.stdout.write(fmt % (0, T, P, Pij[3], Etotal, Ek, Ep, Eb, Ea)) start_time = time.clock() integrator.run(steps) end_time = time.clock() T = temperature.compute() P = pressure.compute() Pij = pressureTensor.compute() Ek = 0.5 * T * (3 * num_particles) Ep = interLJ.computeEnergy() Eb = interFENE.computeEnergy() Ea = interCosine.computeEnergy() Etotal = Ek + Ep + Eb + Ea sys.stdout.write(fmt % (steps, T, P, Pij[3], Etotal, Ek, Ep, Eb, Ea)) sys.stdout.write('\n') # print timings and neighbor list information timers.show(integrator.getTimers(), precision=2) sys.stdout.write('Total # of neighbors = %d\n' % vl.totalSize()) sys.stdout.write('Ave neighs/atom = %.1f\n' % (vl.totalSize() / float(num_particles))) sys.stdout.write('Neighbor list builds = %d\n' % vl.builds) sys.stdout.write('Integration steps = %d\n' % integrator.step) sys.stdout.write('CPUs = %i CPU time per CPU = %.1f\n' % (comm.size,end_time - start_time))<|fim▁end|>
system.storage.addParticles(new_particles, *props)
<|file_name|>registerCtrl.js<|end_file_name|><|fim▁begin|>capstone.controller("RegisterCtrl", function($scope,$http,AuthFactory,$location,user1){ // $(".button-collapse").sideNav(); $http.get(`states.json`) .then((data)=>{ $scope.stateName = data.data console.log($scope.stateName) $('input.autocomplete').autocomplete({ data: $scope.stateName, limit: 10 // The max amount of results that can be shown at once. Default: Infinity. }); }) $scope.date = new Date(); let storageRef = firebase.storage().ref(); let inputElement = document.getElementById("fileInput"); inputElement.addEventListener("change", handleFiles, false) function handleFiles() { var fileList = this.files; /* now you can work with the file list */ console.log("filelist[0]", fileList[0]) storageRef.child(fileList[0].name).put(fileList[0]) .then(function(snapshot) { console.log('Uploaded a blob or file!'); <|fim▁hole|> storageRef.child(fileList[0].name).getDownloadURL() .then((url)=>{ var img =document.getElementById("myImg") img.src = url; $scope.img = img.src; }) .catch((error)=>{ alert("error") }) }); } $scope.register = () => { if($scope.user_email === $scope.user_confirmEmail){ AuthFactory.getter($scope.user_email,$scope.user_password) .then ((data)=> { console.log(data) $scope.UID = data // $http.post(`https://frontendcapstone.firebaseio.com/users/.json`,{ // uid: $scope.UID // }) $http.post(`https://frontendcapstone.firebaseio.com/users/${$scope.UID}.json`,{ uid: $scope.UID, Firstname: $scope.firstName, Lastname: $scope.lastName, email: $scope.user_email, password: $scope.user_password, DOB: $scope.user_dob, imageUrl : $scope.img, Address: {Address1: $scope.user_addressLine1, Address2: $scope.user_addressLine2, City: $scope.user_city, state: $scope.user_state, zipcode: $scope.user_zipcode} }) Materialize.toast("registered successfully", 2000) $location.path(`/`) }) } else { Materialize.toast("Emails have to match", 1000) $("input[type='email']").focus() } } })<|fim▁end|>
<|file_name|>instance_molten_core.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2016+ AzerothCore <www.azerothcore.org>, released under GNU GPL v2 license: http://github.com/azerothcore/azerothcore-wotlk/LICENSE-GPL2 * Copyright (C) 2008-2016 TrinityCore <http://www.trinitycore.org/> * Copyright (C) 2005-2009 MaNGOS <http://getmangos.com/> */ /* ScriptData SDName: Instance_Molten_Core SD%Complete: 0 SDComment: Place Holder SDCategory: Molten Core EndScriptData */ #include "ObjectMgr.h" #include "ScriptMgr.h" #include "InstanceScript.h" #include "CreatureAI.h" #include "molten_core.h" #include "TemporarySummon.h" Position const SummonPositions[10] = { {737.850f, -1145.35f, -120.288f, 4.71368f}, {744.162f, -1151.63f, -119.726f, 4.58204f}, {751.247f, -1152.82f, -119.744f, 4.49673f}, {759.206f, -1155.09f, -120.051f, 4.30104f}, {755.973f, -1152.33f, -120.029f, 4.25588f}, {731.712f, -1147.56f, -120.195f, 4.95955f}, {726.499f, -1149.80f, -120.156f, 5.24055f}, {722.408f, -1152.41f, -120.029f, 5.33087f}, {718.994f, -1156.36f, -119.805f, 5.75738f}, {838.510f, -829.840f, -232.000f, 2.00000f}, }; class instance_molten_core : public InstanceMapScript { public: instance_molten_core() : InstanceMapScript("instance_molten_core", 409) { } struct instance_molten_core_InstanceMapScript : public InstanceScript { instance_molten_core_InstanceMapScript(Map* map) : InstanceScript(map) { SetBossNumber(MAX_ENCOUNTER); _golemaggTheIncineratorGUID = 0; _majordomoExecutusGUID = 0; _cacheOfTheFirelordGUID = 0; _executusSchedule = NULL; _deadBossCount = 0; _ragnarosAddDeaths = 0; _isLoading = false; _summonedExecutus = false; } ~instance_molten_core_InstanceMapScript() { delete _executusSchedule; } void OnPlayerEnter(Player* /*player*/) { if (_executusSchedule) { SummonMajordomoExecutus(*_executusSchedule); delete _executusSchedule; _executusSchedule = NULL; } } void OnCreatureCreate(Creature* creature) { switch (creature->GetEntry()) { case NPC_GOLEMAGG_THE_INCINERATOR: 
_golemaggTheIncineratorGUID = creature->GetGUID(); break; case NPC_MAJORDOMO_EXECUTUS: _majordomoExecutusGUID = creature->GetGUID(); break; default: break; } } void OnGameObjectCreate(GameObject* go) { switch (go->GetEntry()) { case GO_CACHE_OF_THE_FIRELORD: _cacheOfTheFirelordGUID = go->GetGUID(); break; default: break; } } void SetData(uint32 type, uint32 data) { if (type == DATA_RAGNAROS_ADDS) { if (data == 1) ++_ragnarosAddDeaths; else if (data == 0) _ragnarosAddDeaths = 0; } } uint32 GetData(uint32 type) const { switch (type) { case DATA_RAGNAROS_ADDS: return _ragnarosAddDeaths; } return 0; } uint64 GetData64(uint32 type) const { switch (type) { case BOSS_GOLEMAGG_THE_INCINERATOR: return _golemaggTheIncineratorGUID; case BOSS_MAJORDOMO_EXECUTUS: return _majordomoExecutusGUID; } return 0; } bool SetBossState(uint32 bossId, EncounterState state) { if (!InstanceScript::SetBossState(bossId, state)) return false; if (state == DONE && bossId < BOSS_MAJORDOMO_EXECUTUS) ++_deadBossCount; if (_isLoading) return true; if (_deadBossCount == 8) SummonMajordomoExecutus(false); if (bossId == BOSS_MAJORDOMO_EXECUTUS && state == DONE) DoRespawnGameObject(_cacheOfTheFirelordGUID, 7 * DAY); return true; } void SummonMajordomoExecutus(bool done) { if (_summonedExecutus) return; _summonedExecutus = true; if (!done) { instance->SummonCreature(NPC_MAJORDOMO_EXECUTUS, SummonPositions[0]); instance->SummonCreature(NPC_FLAMEWAKER_HEALER, SummonPositions[1]); instance->SummonCreature(NPC_FLAMEWAKER_HEALER, SummonPositions[2]); instance->SummonCreature(NPC_FLAMEWAKER_HEALER, SummonPositions[3]); instance->SummonCreature(NPC_FLAMEWAKER_HEALER, SummonPositions[4]); instance->SummonCreature(NPC_FLAMEWAKER_ELITE, SummonPositions[5]); instance->SummonCreature(NPC_FLAMEWAKER_ELITE, SummonPositions[6]); instance->SummonCreature(NPC_FLAMEWAKER_ELITE, SummonPositions[7]); instance->SummonCreature(NPC_FLAMEWAKER_ELITE, SummonPositions[8]); } else if (TempSummon* summon = 
instance->SummonCreature(NPC_MAJORDOMO_EXECUTUS, RagnarosTelePos)) summon->AI()->DoAction(ACTION_START_RAGNAROS_ALT); } std::string GetSaveData() { OUT_SAVE_INST_DATA; std::ostringstream saveStream; saveStream << "M C " << GetBossSaveData(); OUT_SAVE_INST_DATA_COMPLETE; return saveStream.str(); } void Load(char const* data) { if (!data) { OUT_LOAD_INST_DATA_FAIL; return; } _isLoading = true; OUT_LOAD_INST_DATA(data); char dataHead1, dataHead2; std::istringstream loadStream(data); loadStream >> dataHead1 >> dataHead2; if (dataHead1 == 'M' && dataHead2 == 'C') { EncounterState states[MAX_ENCOUNTER]; uint8 executusCounter = 0; // need 2 loops to check spawning executus/ragnaros for (uint8 i = 0; i < MAX_ENCOUNTER; ++i) { uint32 tmpState; loadStream >> tmpState; if (tmpState == IN_PROGRESS || tmpState > TO_BE_DECIDED) tmpState = NOT_STARTED; states[i] = EncounterState(tmpState); if (tmpState == DONE && i < BOSS_MAJORDOMO_EXECUTUS) ++executusCounter; } if (executusCounter >= 8 && states[BOSS_RAGNAROS] != DONE) _executusSchedule = new bool(states[BOSS_MAJORDOMO_EXECUTUS] == DONE); for (uint8 i = 0; i < MAX_ENCOUNTER; ++i) SetBossState(i, states[i]); } else OUT_LOAD_INST_DATA_FAIL; OUT_LOAD_INST_DATA_COMPLETE; _isLoading = false; } private: uint64 _golemaggTheIncineratorGUID; uint64 _majordomoExecutusGUID; uint64 _cacheOfTheFirelordGUID; bool* _executusSchedule; uint8 _deadBossCount; uint8 _ragnarosAddDeaths; bool _isLoading; bool _summonedExecutus; }; InstanceScript* GetInstanceScript(InstanceMap* map) const { return new instance_molten_core_InstanceMapScript(map); } }; void AddSC_instance_molten_core() { new instance_molten_core();<|fim▁hole|><|fim▁end|>
}
<|file_name|>pyprops.cpp<|end_file_name|><|fim▁begin|>/**<|fim▁hole|> #include <string> #include <sstream> using std::string; using std::ostringstream; // 'cache' is a global c-string -> python unicode name cache. This // conversion is relatively costly in python3, so this cache save // repeating the work many times each frame. pyAttrCache cache; // Constructor pyPropertyNode::pyPropertyNode() { // printf("pyPropertyNode()\n"); pObj = NULL; } pyPropertyNode::pyPropertyNode(const pyPropertyNode &node) { // printf("visiting pyPropertyNode() copy constructor!\n"); pObj = node.pObj; Py_INCREF(pObj); } pyPropertyNode::pyPropertyNode(PyObject *p) { // printf("pyPropertyNode(pObj)\n"); pObj = p; } // Destructor. pyPropertyNode::~pyPropertyNode() { // printf("~pyPropertyNode destructor\n"); if ( pObj == NULL ) { printf("WARNING: calling destructor on null pyPropertyNode\n"); // Py_DECREF(pObj); } Py_XDECREF(pObj); pObj = NULL; } // Assignment operator pyPropertyNode & pyPropertyNode::operator= (const pyPropertyNode &node) { // printf("Visiting pyPropertyNode operator=\n"); if (this != &node) { // protect against invalid self-assignment if ( pObj != NULL ) { // 1: decrement current pObj reference because we are losing it // printf("decrementing existing pObj before overwritting it\n"); Py_DECREF(pObj); } // 2: copy new value to pObj pObj = node.pObj; // 3: increment pObj ref count because we just created another // reference to it. 
Py_INCREF(pObj); } // by convention, always return *this return *this; } // test if pObj has named child attribute bool pyPropertyNode::hasChild(const char *name) { if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); if ( PyObject_HasAttr(pObj, attrObj) ) { return true; } } return false; } // Return a pyPropertyNode object that points to the named child pyPropertyNode pyPropertyNode::getChild(const char *name, bool create) { if ( pObj == NULL ) { return pyPropertyNode(); } PyObject *pValue = PyObject_CallMethod(pObj, (char *)"getChild", (char *)"sb", name, create); if ( PyErr_Occurred() ) PyErr_Print(); if (pValue == NULL) { fprintf(stderr,"Call failed\n"); return pyPropertyNode(); } // give pValue over to the returned property node return pyPropertyNode(pValue); } pyPropertyNode pyPropertyNode::getChild(const char *name, int index, bool create) { if ( pObj == NULL ) { return pyPropertyNode(); } ostringstream ename; ename << name << '[' << index << ']'; // printf("ename = %s\n", ename.str().c_str()); return getChild(ename.str().c_str(), create); } // return true if pObj pointer is NULL bool pyPropertyNode::isNull() { return pObj == NULL; } // return length of attr if it is a list (enumerated) int pyPropertyNode::getLen(const char *name) { if ( pObj != NULL ) { PyObject *pValue = PyObject_CallMethod(pObj, (char *)"getLen", (char *)"s", name); if ( PyErr_Occurred() ) PyErr_Print(); if ( pValue != NULL ) { int len = PyLong_AsLong(pValue); Py_DECREF(pValue); return len; } } return 0; } // return true if pObj is a list (enumerated) void pyPropertyNode::setLen(const char *name, int size) { if ( pObj != NULL ) { PyObject *pValue = PyObject_CallMethod(pObj, (char *)"setLen", (char *)"si", name, size); if ( PyErr_Occurred() ) PyErr_Print(); if ( pValue != NULL ) { Py_DECREF(pValue); } } } // return true if pObj is a list (enumerated) void pyPropertyNode::setLen(const char *name, int size, double init_val) { if ( pObj != NULL ) { PyObject *pValue = 
PyObject_CallMethod(pObj, (char *)"setLen", (char *)"sif", name, size, init_val); if ( PyErr_Occurred() ) PyErr_Print(); if ( pValue != NULL ) { Py_DECREF(pValue); } } } // return true if pObj is a list (enumerated) vector <string> pyPropertyNode::getChildren(bool expand) { vector <string> result; if ( pObj != NULL ) { PyObject *pList = PyObject_CallMethod(pObj, (char *)"getChildren", (char *)"b", expand); if ( PyErr_Occurred() ) PyErr_Print(); if ( pList != NULL ) { if ( PyList_Check(pList) ) { int len = PyList_Size(pList); for ( int i = 0; i < len; i++ ) { PyObject *pItem = PyList_GetItem(pList, i); // note: PyList_GetItem doesn't give us ownership // of pItem so we should not decref() it. PyObject *pStr = PyObject_Str(pItem); result.push_back( (string)PyUnicode_AsUTF8(pStr) ); Py_DECREF(pStr); } } Py_DECREF(pList); } } return result; } // return true if pObj/name is leaf bool pyPropertyNode::isLeaf(const char *name) { if ( pObj == NULL ) { return false; } PyObject *pValue = PyObject_CallMethod(pObj, (char *)"isLeaf", (char *)"s", name); if ( PyErr_Occurred() ) PyErr_Print(); bool result = PyObject_IsTrue(pValue); Py_DECREF(pValue); return result; } // note: expects the calling layer to Py_DECREF(pAttr) double pyPropertyNode::PyObject2Double(const char *name, PyObject *pAttr) { double result = 0.0; if ( pAttr != NULL ) { if ( PyFloat_Check(pAttr) ) { result = PyFloat_AsDouble(pAttr); } else if ( PyLong_Check(pAttr) ) { result = PyLong_AsLong(pAttr); } else if ( PyLong_Check(pAttr) ) { result = PyLong_AsLong(pAttr); } else if ( PyBytes_Check(pAttr) ) { PyObject *pFloat = PyFloat_FromString(pAttr); if ( pFloat != NULL ) { result = PyFloat_AsDouble(pFloat); Py_DECREF(pFloat); } else { if ( PyErr_Occurred() ) PyErr_Print(); printf("WARNING: conversion from string to float failed\n"); PyObject *pStr = PyObject_Str(pAttr); const char *s = PyUnicode_AsUTF8(pStr); printf(" %s='%s'\n", name, s); Py_DECREF(pStr); } } else { printf("Unknown object type: '%s' ", 
pObj->ob_type->tp_name); PyObject *pStr = PyObject_Str(pObj); const char *s = PyUnicode_AsUTF8(pStr); printf(" %s='%s'\n", name, s); Py_DECREF(pStr); } } return result; } // note: expects the calling layer to Py_DECREF(pAttr) long pyPropertyNode::PyObject2Long(const char *name, PyObject *pAttr) { long result = 0; if ( pAttr != NULL ) { if ( PyLong_Check(pAttr) ) { result = PyLong_AsLong(pAttr); } else if ( PyFloat_Check(pAttr) ) { result = (long)PyFloat_AsDouble(pAttr); } else if ( PyBytes_Check(pAttr) ) { PyObject *pFloat = PyFloat_FromString(pAttr); if ( pFloat != NULL ) { result = PyFloat_AsDouble(pFloat); Py_DECREF(pFloat); } else { if ( PyErr_Occurred() ) PyErr_Print(); printf("WARNING: conversion from string to long failed\n"); PyObject *pStr = PyObject_Str(pAttr); const char *s = PyUnicode_AsUTF8(pStr); printf(" %s='%s'\n", name, s); Py_DECREF(pStr); } } else { printf("Unknown object type: '%s' ", pAttr->ob_type->tp_name); PyObject *pStr = PyObject_Str(pAttr); const char *s = PyUnicode_AsUTF8(pStr); printf(" %s='%s'\n", name, s); Py_DECREF(pStr); } } return result; } // value getters double pyPropertyNode::getDouble(const char *name) { double result = 0.0; if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); if ( PyObject_HasAttr(pObj, attrObj) ) { PyObject *pAttr = PyObject_GetAttr(pObj, attrObj); if ( pAttr != NULL ) { result = PyObject2Double(name, pAttr); Py_DECREF(pAttr); } } } return result; } long pyPropertyNode::getLong(const char *name) { long result = 0; if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); if ( PyObject_HasAttr(pObj, attrObj) ) { PyObject *pAttr = PyObject_GetAttr(pObj, attrObj); if ( pAttr != NULL ) { result = PyObject2Long(name, pAttr); Py_DECREF(pAttr); } } } return result; } bool pyPropertyNode::getBool(const char *name) { bool result = false; if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); if ( PyObject_HasAttr(pObj, attrObj) ) { PyObject *pAttr = PyObject_GetAttr(pObj, attrObj); if ( 
pAttr != NULL ) { result = PyObject_IsTrue(pAttr); Py_DECREF(pAttr); } } } return result; } string pyPropertyNode::getString(const char *name) { string result = ""; if ( pObj != NULL ) { // test for normal vs. enumerated request char *pos = strchr((char *)name, '['); if ( pos == NULL ) { // normal request PyObject *attrObj = cache.get_attr(name); if ( PyObject_HasAttr(pObj, attrObj) ) { PyObject *pAttr = PyObject_GetAttr(pObj, attrObj); if ( pAttr != NULL ) { PyObject *pStr = PyObject_Str(pAttr); if ( pStr != NULL ) { result = (string)PyUnicode_AsUTF8(pStr); Py_DECREF(pStr); } Py_DECREF(pAttr); } } } else { // enumerated request // this is a little goofy, but this code typically only runs // on an interactive telnet request, and we don't want to // modify the request string in place. string base = name; size_t basepos = base.find("["); if ( basepos != string::npos ) { base = base.substr(0, basepos); } pos++; int index = atoi(pos); result = getString(base.c_str(), index); // printf("%s %d %s\n", name, index, result.c_str()); } } return result; } // indexed value getters double pyPropertyNode::getDouble(const char *name, int index) { double result = 0.0; if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); if ( PyObject_HasAttr(pObj, attrObj) ) { PyObject *pList = PyObject_GetAttr(pObj, attrObj); if ( pList != NULL ) { if ( PyList_Check(pList) ) { if ( index < PyList_Size(pList) ) { PyObject *pAttr = PyList_GetItem(pList, index); // note: PyList_GetItem doesn't give us ownership // of pAttr so we should not decref() it. 
if ( pAttr != NULL ) { result = PyObject2Double(name, pAttr); } } } else { printf("WARNING: request indexed value of plain node: %s!\n", name); } Py_DECREF(pList); } } } return result; } long pyPropertyNode::getLong(const char *name, int index) { long result = 0; if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); if ( PyObject_HasAttr(pObj, attrObj) ) { PyObject *pList = PyObject_GetAttr(pObj, attrObj); if ( pList != NULL ) { if ( PyList_Check(pList) ) { if ( index < PyList_Size(pList) ) { PyObject *pAttr = PyList_GetItem(pList, index); // note: PyList_GetItem doesn't give us ownership // of pAttr so we should not decref() it. if ( pAttr != NULL ) { result = PyObject2Long(name, pAttr); } } } else { printf("WARNING: request indexed value of plain node: %s!\n", name); } Py_DECREF(pList); } } } return result; } string pyPropertyNode::getString(const char *name, int index) { string result = ""; if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); if ( PyObject_HasAttr(pObj, attrObj) ) { PyObject *pList = PyObject_GetAttr(pObj, attrObj); if ( pList != NULL ) { if ( PyList_Check(pList) ) { if ( index < PyList_Size(pList) ) { PyObject *pAttr = PyList_GetItem(pList, index); // note: PyList_GetItem doesn't give us ownership // of pAttr so we should not decref() it. 
if ( pAttr != NULL ) { PyObject *pStr = PyObject_Str(pAttr); if ( pStr != NULL ) { result = (string)PyUnicode_AsUTF8(pStr); Py_DECREF(pStr); } } } } else { printf("WARNING: request indexed value of plain node: %s!\n", name); } Py_DECREF(pList); } } } return result; } bool pyPropertyNode::getBool(const char *name, int index) { bool result = false; if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); if ( PyObject_HasAttr(pObj, attrObj) ) { PyObject *pList = PyObject_GetAttr(pObj, attrObj); if ( pList != NULL ) { if ( PyList_Check(pList) ) { if ( index < PyList_Size(pList) ) { PyObject *pAttr = PyList_GetItem(pList, index); // note: PyList_GetItem doesn't give us ownership // of pAttr so we should not decref() it. if ( pAttr != NULL ) { result = PyObject_IsTrue(pAttr); } } } else { printf("WARNING: request indexed value of plain node: %s!\n", name); } Py_DECREF(pList); } } } return result; } // value setters bool pyPropertyNode::setDouble( const char *name, double val ) { if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); PyObject *pFloat = PyFloat_FromDouble(val); int result = PyObject_SetAttr(pObj, attrObj, pFloat); Py_DECREF(pFloat); return result != -1; } else { return false; } } bool pyPropertyNode::setLong( const char *name, long val ) { if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); PyObject *pLong = PyLong_FromLong(val); int result = PyObject_SetAttr(pObj, attrObj, pLong); Py_DECREF(pLong); return result != -1; } else { return false; } } bool pyPropertyNode::setBool( const char *name, bool val ) { if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); PyObject *pBool = PyBool_FromLong((long)val); int result = PyObject_SetAttr(pObj, attrObj, pBool); Py_DECREF(pBool); return result != -1; } else { return false; } } bool pyPropertyNode::setString( const char *name, string val ) { if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); PyObject *pString = PyUnicode_FromString(val.c_str()); int result = 
PyObject_SetAttr(pObj, attrObj, pString); Py_DECREF(pString); return result != -1; } else { return false; } } // indexed value setters bool pyPropertyNode::setDouble( const char *name, int index, double val ) { if ( pObj != NULL ) { PyObject *attrObj = cache.get_attr(name); PyObject *pList = PyObject_GetAttr(pObj, attrObj); if ( pList != NULL ) { if ( PyList_Check(pList) ) { if ( index < PyList_Size(pList) ) { PyObject *pFloat = PyFloat_FromDouble(val); // note setitem() steals the reference so we can't // decrement it PyList_SetItem(pList, index, pFloat ); } else { // index out of range } } else { // not a list } Py_DECREF(pList); } else { // list lookup failed } } else { return false; } return true; } // Return a pyPropertyNode object that points to the named child void pyPropertyNode::pretty_print() { if ( pObj == NULL ) { printf("pretty_print(): Null pyPropertyNode()\n"); } else { PyObject *pValue = PyObject_CallMethod(pObj, (char *)"pretty_print", (char *)""); if ( PyErr_Occurred() ) PyErr_Print(); if (pValue != NULL) { Py_DECREF(pValue); } else { fprintf(stderr,"Call failed\n"); } } } // These only need to be looked up once and then saved static PyObject *pModuleProps = NULL; static PyObject *pModuleJSON = NULL; static PyObject *pModuleXML = NULL; // This function must be called before any pyPropertyNode usage. It // imports the python props and props_json/xml modules. 
void pyPropsInit() { // python property system pModuleProps = PyImport_ImportModule("props"); if ( PyErr_Occurred() ) PyErr_Print(); if (pModuleProps == NULL) { fprintf(stderr, "Failed to load 'props'\n"); } // Json I/O system pModuleJSON = PyImport_ImportModule("props_json"); if ( PyErr_Occurred() ) PyErr_Print(); if (pModuleJSON == NULL) { fprintf(stderr, "Failed to load 'props_json'\n"); } // xml I/O system pModuleXML = PyImport_ImportModule("props_xml"); if ( PyErr_Occurred() ) PyErr_Print(); if (pModuleXML == NULL) { fprintf(stderr, "Failed to load 'props_xml'\n"); } } // This function can be called at exit to properly free resources // requested by init() extern void pyPropsCleanup(void) { printf("running pyPropsCleanup()\n"); Py_XDECREF(pModuleProps); Py_XDECREF(pModuleXML); } // Return a pyPropertyNode object that points to the specified path in // the property tree. This is a 'heavier' operation so it is // recommended to call this function from initialization routines and // save the result. Then use the pyPropertyNode for direct read/write // access in your update routines. pyPropertyNode pyGetNode(string abs_path, bool create) { PyObject *attrObj = cache.get_attr("getNode"); PyObject *pFuncGetNode = PyObject_GetAttr(pModuleProps, attrObj); if ( PyErr_Occurred() ) PyErr_Print(); if ( pFuncGetNode == NULL || ! 
PyCallable_Check(pFuncGetNode) ) { fprintf(stderr, "Cannot find function 'getNode()'\n"); return pyPropertyNode(); } // FIXME decref pFuncGetNode PyObject *pPath = PyUnicode_FromString(abs_path.c_str()); PyObject *pCreate = PyBool_FromLong(create); if (!pPath || !pCreate) { Py_XDECREF(pPath); Py_XDECREF(pCreate); fprintf(stderr, "Cannot convert argument\n"); return pyPropertyNode(); } PyObject *pValue = PyObject_CallFunctionObjArgs(pFuncGetNode, pPath, pCreate, NULL); if ( PyErr_Occurred() ) PyErr_Print(); Py_DECREF(pPath); Py_DECREF(pCreate); if (pValue != NULL) { // give pValue over to the returned property node /*printf("pyGetNode() success, creating pyPropertyNode\n"); pyPropertyNode tmp(pValue); printf("before return\n");*/ return pyPropertyNode(pValue); } else { printf("Call failed\n"); return pyPropertyNode(); } return pyPropertyNode(); } bool readXML(string filename, pyPropertyNode *node) { // getNode() function PyObject *pFuncLoad = PyObject_GetAttrString(pModuleXML, "load"); if ( PyErr_Occurred() ) PyErr_Print(); if ( pFuncLoad == NULL || ! PyCallable_Check(pFuncLoad) ) { fprintf(stderr, "Cannot find function 'load()'\n"); return false; } PyObject *pPath = PyBytes_FromString(filename.c_str()); if (!pPath || !node->pObj) { Py_XDECREF(pPath); Py_XDECREF(pFuncLoad); fprintf(stderr, "Cannot convert argument\n"); return false; } PyObject *pValue = PyObject_CallFunctionObjArgs(pFuncLoad, pPath, node->pObj, NULL); if ( PyErr_Occurred() ) PyErr_Print(); Py_DECREF(pPath); Py_DECREF(pFuncLoad); if (pValue != NULL) { // give pValue over to the returned property node bool result = PyObject_IsTrue(pValue); Py_DECREF(pValue); return result; } else { fprintf(stderr,"Call failed\n"); } return false; } bool writeXML(string filename, pyPropertyNode *node) { // getNode() function PyObject *pFuncSave = PyObject_GetAttrString(pModuleXML, "save"); if ( PyErr_Occurred() ) PyErr_Print(); if ( pFuncSave == NULL || ! 
PyCallable_Check(pFuncSave) ) { fprintf(stderr, "Cannot find function 'save()'\n"); return false; } PyObject *pPath = PyBytes_FromString(filename.c_str()); if (!pPath || !node->pObj) { Py_XDECREF(pPath); Py_XDECREF(pFuncSave); fprintf(stderr, "Cannot convert argument\n"); return false; } PyObject *pValue = PyObject_CallFunctionObjArgs(pFuncSave, pPath, node->pObj, NULL); if ( PyErr_Occurred() ) PyErr_Print(); Py_DECREF(pPath); Py_DECREF(pFuncSave); if (pValue != NULL) { // give pValue over to the returned property node bool result = PyObject_IsTrue(pValue); Py_DECREF(pValue); return result; } else { fprintf(stderr,"Call failed\n"); } return false; } bool readJSON(string filename, pyPropertyNode *node) { // getNode() function PyObject *pFuncLoad = PyObject_GetAttrString(pModuleJSON, "load"); if ( PyErr_Occurred() ) PyErr_Print(); if ( pFuncLoad == NULL || ! PyCallable_Check(pFuncLoad) ) { fprintf(stderr, "Cannot find function 'load()'\n"); return false; } PyObject *pPath = PyUnicode_FromString(filename.c_str()); if (!pPath || !node->pObj) { Py_XDECREF(pPath); Py_XDECREF(pFuncLoad); fprintf(stderr, "Cannot convert argument\n"); return false; } PyObject *pValue = PyObject_CallFunctionObjArgs(pFuncLoad, pPath, node->pObj, NULL); if ( PyErr_Occurred() ) PyErr_Print(); Py_DECREF(pPath); Py_DECREF(pFuncLoad); if (pValue != NULL) { // give pValue over to the returned property node bool result = PyObject_IsTrue(pValue); Py_DECREF(pValue); return result; } else { fprintf(stderr,"Call failed\n"); } return false; } bool writeJSON(string filename, pyPropertyNode *node) { // getNode() function PyObject *pFuncSave = PyObject_GetAttrString(pModuleJSON, "save"); if ( PyErr_Occurred() ) PyErr_Print(); if ( pFuncSave == NULL || ! 
PyCallable_Check(pFuncSave) ) { fprintf(stderr, "Cannot find function 'save()'\n"); return false; } PyObject *pPath = PyBytes_FromString(filename.c_str()); if (!pPath || !node->pObj) { Py_XDECREF(pPath); Py_XDECREF(pFuncSave); fprintf(stderr, "Cannot convert argument\n"); return false; } PyObject *pValue = PyObject_CallFunctionObjArgs(pFuncSave, pPath, node->pObj, NULL); if ( PyErr_Occurred() ) PyErr_Print(); Py_DECREF(pPath); Py_DECREF(pFuncSave); if (pValue != NULL) { // give pValue over to the returned property node bool result = PyObject_IsTrue(pValue); Py_DECREF(pValue); return result; } else { fprintf(stderr,"Call failed\n"); } return false; }<|fim▁end|>
* C++ interface to a python PropertyNode() */ #include "pyprops.h"
<|file_name|>WarehouseProductDAO.java<|end_file_name|><|fim▁begin|>package com.storage.mywarehouse.Dao; import com.storage.mywarehouse.View.WarehouseProduct; import org.hibernate.Session; import org.hibernate.Transaction; import org.hibernate.criterion.Restrictions; import java.util.List; public class WarehouseProductDAO { @SuppressWarnings("unchecked") public static List<WarehouseProduct> findById(int id) { Session session = NewHibernateUtil.getSessionFactory().openSession(); Transaction tx = session.beginTransaction(); List products = session.createCriteria(WarehouseProduct.class) .add(Restrictions.eq("productId", id)) .list(); tx.commit(); session.close(); return products; } @SuppressWarnings("unchecked") public static List<WarehouseProduct> findByQuantity(int quantity) { Session session = NewHibernateUtil.getSessionFactory().openSession(); Transaction tx = session.beginTransaction(); List emptyWarehouseProduct = session.createCriteria(WarehouseProduct.class)<|fim▁hole|> tx.commit(); session.close(); return emptyWarehouseProduct; } @SuppressWarnings("unchecked") public static List<WarehouseProduct> findByParam(String param, String value) { Session session = NewHibernateUtil.getSessionFactory().openSession(); Transaction tx = session.beginTransaction(); List products = session.createCriteria(WarehouseProduct.class) .add(Restrictions.eq(param.toLowerCase(), value)) .list(); tx.commit(); session.close(); return products; } @SuppressWarnings("unchecked") public static List<WarehouseProduct> findByParamContainingValue(String param, String value) { Session session = NewHibernateUtil.getSessionFactory().openSession(); Transaction tx = session.beginTransaction(); List products = session.createCriteria(WarehouseProduct.class) .add(Restrictions.like(param.toLowerCase(), "%" + value + "%")) .list(); tx.commit(); session.close(); return products; } }<|fim▁end|>
.add(Restrictions.eq("quantity", quantity)) .list();
<|file_name|>block.js<|end_file_name|><|fim▁begin|>class Block { constructor(x, y, width, colour) { this.x = x; this.y = y;<|fim▁hole|> this.occupied = false; } draw() { fill(this.colour); rect(this.x, this.y, this.width, this.width); } }<|fim▁end|>
this.width = width; this.colour = colour;
<|file_name|>table.rs<|end_file_name|><|fim▁begin|>use std::cmp::max; use std::env; use std::ops::Deref; use std::sync::{Mutex, MutexGuard}; use datetime::TimeZone; use zoneinfo_compiled::{CompiledData, Result as TZResult}; use lazy_static::lazy_static; use log::*; use users::UsersCache; use crate::fs::{File, fields as f}; use crate::fs::feature::git::GitCache; use crate::output::cell::TextCell; use crate::output::render::TimeRender; use crate::output::time::TimeFormat; use crate::theme::Theme; /// Options for displaying a table. #[derive(PartialEq, Debug)] pub struct Options { pub size_format: SizeFormat, pub time_format: TimeFormat, pub user_format: UserFormat, pub columns: Columns, } /// Extra columns to display in the table. #[allow(clippy::struct_excessive_bools)] #[derive(PartialEq, Debug, Copy, Clone)] pub struct Columns { /// At least one of these timestamps will be shown. pub time_types: TimeTypes, // The rest are just on/off pub inode: bool, pub links: bool, pub blocks: bool, pub group: bool, pub git: bool, pub octal: bool, // Defaults to true: pub permissions: bool, pub filesize: bool, pub user: bool, } impl Columns { pub fn collect(&self, actually_enable_git: bool) -> Vec<Column> { let mut columns = Vec::with_capacity(4); if self.inode { columns.push(Column::Inode); } if self.octal { columns.push(Column::Octal); } if self.permissions { columns.push(Column::Permissions); } if self.links { columns.push(Column::HardLinks); } if self.filesize { columns.push(Column::FileSize); } if self.blocks { columns.push(Column::Blocks); } if self.user { columns.push(Column::User); } if self.group { columns.push(Column::Group); } if self.time_types.modified { columns.push(Column::Timestamp(TimeType::Modified)); } if self.time_types.changed { columns.push(Column::Timestamp(TimeType::Changed)); } if self.time_types.created { columns.push(Column::Timestamp(TimeType::Created)); } if self.time_types.accessed { columns.push(Column::Timestamp(TimeType::Accessed)); } if self.git 
&& actually_enable_git { columns.push(Column::GitStatus); } columns } } /// A table contains these. #[derive(Debug, Copy, Clone)] pub enum Column { Permissions, FileSize, Timestamp(TimeType), Blocks, User, Group, HardLinks, Inode, GitStatus, Octal, } /// Each column can pick its own **Alignment**. Usually, numbers are /// right-aligned, and text is left-aligned. #[derive(Copy, Clone)] pub enum Alignment { Left, Right, } impl Column { /// Get the alignment this column should use. pub fn alignment(self) -> Alignment { match self { Self::FileSize | Self::HardLinks | Self::Inode | Self::Blocks | Self::GitStatus => Alignment::Right, _ => Alignment::Left, } } /// Get the text that should be printed at the top, when the user elects /// to have a header row printed. pub fn header(self) -> &'static str { match self { Self::Permissions => "Permissions", Self::FileSize => "Size", Self::Timestamp(t) => t.header(), Self::Blocks => "Blocks", Self::User => "User", Self::Group => "Group", Self::HardLinks => "Links", Self::Inode => "inode", Self::GitStatus => "Git", Self::Octal => "Octal", } } } /// Formatting options for file sizes. #[allow(clippy::pub_enum_variant_names)] #[derive(PartialEq, Debug, Copy, Clone)] pub enum SizeFormat { /// Format the file size using **decimal** prefixes, such as “kilo”, /// “mega”, or “giga”. DecimalBytes, /// Format the file size using **binary** prefixes, such as “kibi”, /// “mebi”, or “gibi”. BinaryBytes, /// Do no formatting and just display the size as a number of bytes. JustBytes, } /// Formatting options for user and group. #[derive(PartialEq, Debug, Copy, Clone)] pub enum UserFormat { /// The UID / GID Numeric, /// Show the name Name, } impl Default for SizeFormat { fn default() -> Self { Self::DecimalBytes } } /// The types of a file’s time fields. These three fields are standard /// across most (all?) operating systems. #[derive(PartialEq, Debug, Copy, Clone)] pub enum TimeType { /// The file’s modified time (`st_mtime`). 
Modified, /// The file’s changed time (`st_ctime`) Changed, /// The file’s accessed time (`st_atime`). Accessed, /// The file’s creation time (`btime` or `birthtime`). Created, } impl TimeType { /// Returns the text to use for a column’s heading in the columns output. pub fn header(self) -> &'static str { match self { Self::Modified => "Date Modified", Self::Changed => "Date Changed", Self::Accessed => "Date Accessed", Self::Created => "Date Created", } } } /// Fields for which of a file’s time fields should be displayed in the /// columns output. /// /// There should always be at least one of these — there’s no way to disable /// the time columns entirely (yet). #[derive(PartialEq, Debug, Copy, Clone)] #[allow(clippy::struct_excessive_bools)] pub struct TimeTypes { pub modified: bool, pub changed: bool, pub accessed: bool, pub created: bool, } impl Default for TimeTypes { /// By default, display just the ‘modified’ time. This is the most /// common option, which is why it has this shorthand. fn default() -> Self { Self { modified: true, changed: false, accessed: false, created: false, } } } /// The **environment** struct contains any data that could change between /// running instances of exa, depending on the user’s computer’s configuration. /// /// Any environment field should be able to be mocked up for test runs. pub struct Environment { /// Localisation rules for formatting numbers. numeric: locale::Numeric, /// The computer’s current time zone. This gets used to determine how to /// offset files’ timestamps. tz: Option<TimeZone>, /// Mapping cache of user IDs to usernames. 
users: Mutex<UsersCache>, } impl Environment { pub fn lock_users(&self) -> MutexGuard<'_, UsersCache> { self.users.lock().unwrap() } fn load_all() -> Self { let tz = match determine_time_zone() { Ok(t) => { Some(t) } Err(ref e) => { println!("Unable to determine time zone: {}", e); None } }; let numeric = locale::Numeric::load_user_locale() .unwrap_or_else(|_| locale::Numeric::english()); let users = Mutex::new(UsersCache::new()); Self { numeric, tz, users } } } fn determine_time_zone() -> TZResult<TimeZone> { if let Ok(file) = env::var("TZ") { TimeZone::from_file({ if file.starts_with('/') { file } else { format!("/usr/share/zoneinfo/{}", { if file.starts_with(':') { file.replacen(":", "", 1) } else { file } }) } }) } else { TimeZone::from_file("/etc/localtime") } } lazy_static! { static ref ENVIRONMENT: Environment = Environment::load_all(); } pub struct Table<'a> { columns: Vec<Column>, theme: &'a Theme, env: &'a Environment, widths: TableWidths, time_format: TimeFormat, size_format: SizeFormat, user_format: UserFormat, git: Option<&'a GitCache>, } #[derive(Clone)] pub struct Row { cells: Vec<TextCell>, } impl<'a, 'f> Table<'a> { pub fn new(options: &'a Options, git: Option<&'a GitCache>, theme: &'a Theme) -> Table<'a> { let columns = options.columns.collect(git.is_some()); let widths = TableWidths::zero(columns.len()); let env = &*ENVIRONMENT; Table { theme, widths, columns, git, env, time_format: options.time_format, size_format: options.size_format, user_format: options.user_format, }<|fim▁hole|> &self.widths } pub fn header_row(&self) -> Row { let cells = self.columns.iter() .map(|c| TextCell::paint_str(self.theme.ui.header, c.header())) .collect(); Row { cells } } pub fn row_for_file(&self, file: &File<'_>, xattrs: bool) -> Row { let cells = self.columns.iter() .map(|c| self.display(file, *c, xattrs)) .collect(); Row { cells } } pub fn add_widths(&mut self, row: &Row) { self.widths.add_widths(row) } fn permissions_plus(&self, file: &File<'_>, xattrs: bool) 
-> f::PermissionsPlus { f::PermissionsPlus { file_type: file.type_char(), permissions: file.permissions(), xattrs, } } fn octal_permissions(&self, file: &File<'_>) -> f::OctalPermissions { f::OctalPermissions { permissions: file.permissions(), } } fn display(&self, file: &File<'_>, column: Column, xattrs: bool) -> TextCell { match column { Column::Permissions => { self.permissions_plus(file, xattrs).render(self.theme) } Column::FileSize => { file.size().render(self.theme, self.size_format, &self.env.numeric) } Column::HardLinks => { file.links().render(self.theme, &self.env.numeric) } Column::Inode => { file.inode().render(self.theme.ui.inode) } Column::Blocks => { file.blocks().render(self.theme) } Column::User => { file.user().render(self.theme, &*self.env.lock_users(), self.user_format) } Column::Group => { file.group().render(self.theme, &*self.env.lock_users(), self.user_format) } Column::GitStatus => { self.git_status(file).render(self.theme) } Column::Octal => { self.octal_permissions(file).render(self.theme.ui.octal) } Column::Timestamp(TimeType::Modified) => { file.modified_time().render(self.theme.ui.date, &self.env.tz, self.time_format) } Column::Timestamp(TimeType::Changed) => { file.changed_time().render(self.theme.ui.date, &self.env.tz, self.time_format) } Column::Timestamp(TimeType::Created) => { file.created_time().render(self.theme.ui.date, &self.env.tz, self.time_format) } Column::Timestamp(TimeType::Accessed) => { file.accessed_time().render(self.theme.ui.date, &self.env.tz, self.time_format) } } } fn git_status(&self, file: &File<'_>) -> f::Git { debug!("Getting Git status for file {:?}", file.path); self.git .map(|g| g.get(&file.path, file.is_directory())) .unwrap_or_default() } pub fn render(&self, row: Row) -> TextCell { let mut cell = TextCell::default(); let iter = row.cells.into_iter() .zip(self.widths.iter()) .enumerate(); for (n, (this_cell, width)) in iter { let padding = width - *this_cell.width; match self.columns[n].alignment() { 
Alignment::Left => { cell.append(this_cell); cell.add_spaces(padding); } Alignment::Right => { cell.add_spaces(padding); cell.append(this_cell); } } cell.add_spaces(1); } cell } } pub struct TableWidths(Vec<usize>); impl Deref for TableWidths { type Target = [usize]; fn deref(&self) -> &Self::Target { &self.0 } } impl TableWidths { pub fn zero(count: usize) -> Self { Self(vec![0; count]) } pub fn add_widths(&mut self, row: &Row) { for (old_width, cell) in self.0.iter_mut().zip(row.cells.iter()) { *old_width = max(*old_width, *cell.width); } } pub fn total(&self) -> usize { self.0.len() + self.0.iter().sum::<usize>() } }<|fim▁end|>
} pub fn widths(&self) -> &TableWidths {
<|file_name|>get_snmp_name_desc.py<|end_file_name|><|fim▁begin|>''' get_snmp_name_desc.py ''' from snmp_helper import snmp_get_oid,snmp_extract PORT=161 COMMUNITY='galileo'<|fim▁hole|>for rtr in rtrs.keys(): print rtr for oid in oids.keys(): print " " + oid + " = " + snmp_extract(snmp_get_oid((rtrs[rtr],COMMUNITY,PORT),oids[oid]))<|fim▁end|>
rtrs={'pynet-rtr1':'184.105.247.70', 'pynet-rtr2':'184.105.247.71'} oids={'sysName':'1.3.6.1.2.1.1.5.0', 'sysDescr':'1.3.6.1.2.1.1.1.0'}
<|file_name|>comments.py<|end_file_name|><|fim▁begin|>from django.shortcuts import redirect from portfolio.models.comments import PhotoComment from portfolio.models.photos import Photo from portfolio.views.base import AuthenticatedView class CommentPhotoView(AuthenticatedView): """ View that handles commenting on a photo """ def post(self, request): comment_content = request.POST.get('comment', '') photo = request.POST.get('photo', 0) if comment_content and photo: comment = PhotoComment( photo=Photo.objects.get(id=photo), owner=request.user, content=comment_content ) comment.save() if not photo: return redirect('portfolio.home') <|fim▁hole|><|fim▁end|>
return redirect('portfolio.photo.view', photo_id=photo)
<|file_name|>log.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> * * You can copy, redistribute or modify this Program under the terms of * the GNU General Public License version 2 as published by the Free * Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * version 2 along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301, USA. */ // written by Pierre Chifflier <[email protected]> use json::*; use ikev2::ikev2::{IKEV2State,IKEV2Transaction}; use ikev2::ipsec_parser::IKEV2_FLAG_INITIATOR; #[no_mangle] pub extern "C" fn rs_ikev2_log_json_response(state: &mut IKEV2State, tx: &mut IKEV2Transaction) -> *mut JsonT { let js = Json::object(); js.set_integer("version_major", tx.hdr.maj_ver as u64); js.set_integer("version_minor", tx.hdr.min_ver as u64); js.set_integer("exchange_type", tx.hdr.exch_type.0 as u64); js.set_integer("message_id", tx.hdr.msg_id as u64); js.set_string("init_spi", &format!("{:016x}", tx.hdr.init_spi)); js.set_string("resp_spi", &format!("{:016x}", tx.hdr.resp_spi)); if tx.hdr.flags & IKEV2_FLAG_INITIATOR != 0 { js.set_string("role", &"initiator"); } else { js.set_string("role", &"responder"); js.set_string("alg_enc", &format!("{:?}", state.alg_enc)); js.set_string("alg_auth", &format!("{:?}", state.alg_auth)); js.set_string("alg_prf", &format!("{:?}", state.alg_prf)); js.set_string("alg_dh", &format!("{:?}", state.alg_dh)); js.set_string("alg_esn", &format!("{:?}", state.alg_esn)); } js.set_integer("errors", tx.errors as u64); let jsa = Json::array(); for payload in tx.payload_types.iter() { jsa.array_append_string(&format!("{:?}", payload)); } js.set("payload", jsa); let jsa = Json::array(); 
for notify in tx.notify_types.iter() { jsa.array_append_string(&format!("{:?}", notify)); } js.set("notify", jsa); return js.unwrap(); }<|fim▁end|>
/* Copyright (C) 2018 Open Information Security Foundation
<|file_name|>nasm_spec.js<|end_file_name|><|fim▁begin|>var expect = require('chai').expect; var runner = require('../runner'); describe('nasm runner', function() { describe('.run', function() { it('should handle basic code evaluation (no libc)', function(done) { runner.run({ language: 'nasm', code: [ ' global _start', ' section .text', '_start:', ' mov rax, 1', ' mov rdi, 1', ' mov rsi, message',<|fim▁hole|> ' syscall', 'message:', 'db "Hello, Netwide Assembler!", 25' ].join('\n') }, function(buffer) { expect(buffer.stdout).to.equal('Hello, Netwide Assembler!'); done(); }); }); it('should handle basic code evaluation (with libc)', function(done) { runner.run({ language: 'nasm', code: [ ' global main', ' extern puts', ' section .text', 'main:', ' mov rdi, message', ' call puts', ' ret', 'message:', 'db "Netwide Assembler together with LIBC! Let\'s Port Codewars From Rails to THIS! \\m/", 0' ].join('\n') }, function(buffer) { expect(buffer.stdout).to.equal('Netwide Assembler together with LIBC! Let\'s Port Codewars From Rails to THIS! \\m/\n'); done(); }); }); }); });<|fim▁end|>
' mov rdx, 25', ' syscall', ' mov eax, 60', ' xor rdi, rdi',
<|file_name|>method.go<|end_file_name|><|fim▁begin|>package main import ( "go/ast" "go/token" "strings" ) type method struct { name *ast.Ident params []arg results []arg structsResolved bool } func (m method) definition(ifc iface) ast.Decl { notImpl := fetchFuncDecl("ExampleEndpoint") notImpl.Name = m.name notImpl.Recv = fieldList(ifc.reciever()) scope := scopeWith(notImpl.Recv.List[0].Names[0].Name) notImpl.Type.Params = m.funcParams(scope) notImpl.Type.Results = m.funcResults() return notImpl } func (m method) endpointMaker(ifc iface) ast.Decl { endpointFn := fetchFuncDecl("makeExampleEndpoint") scope := scopeWith("ctx", "req", ifc.receiverName().Name) anonFunc := endpointFn.Body.List[0].(*ast.ReturnStmt).Results[0].(*ast.FuncLit) if !m.hasContext() { // strip context param from endpoint function anonFunc.Type.Params.List = anonFunc.Type.Params.List[1:] } anonFunc = replaceIdent(anonFunc, "ExampleRequest", m.requestStructName()).(*ast.FuncLit) callMethod := m.called(ifc, scope, "ctx", "req") anonFunc.Body.List[1] = callMethod anonFunc.Body.List[2].(*ast.ReturnStmt).Results[0] = m.wrapResult(callMethod.Lhs) endpointFn.Body.List[0].(*ast.ReturnStmt).Results[0] = anonFunc endpointFn.Name = m.endpointMakerName() endpointFn.Type.Params = fieldList(ifc.reciever()) endpointFn.Type.Results = fieldList(typeField(sel(id("endpoint"), id("Endpoint")))) return endpointFn } func (m method) pathName() string { return "/" + strings.ToLower(m.name.Name) } func (m method) encodeFuncName() *ast.Ident { return id("Encode" + m.name.Name + "Response") } func (m method) decodeFuncName() *ast.Ident { return id("Decode" + m.name.Name + "Request") } func (m method) resultNames(scope *ast.Scope) []*ast.Ident { ids := []*ast.Ident{} for _, rz := range m.results { ids = append(ids, rz.chooseName(scope)) } return ids } func (m method) called(ifc iface, scope *ast.Scope, ctxName, spreadStruct string) *ast.AssignStmt { m.resolveStructNames() resNamesExpr := []ast.Expr{} for _, r := range 
m.resultNames(scope) { resNamesExpr = append(resNamesExpr, ast.Expr(r)) } arglist := []ast.Expr{} if m.hasContext() { arglist = append(arglist, id(ctxName)) } ssid := id(spreadStruct) for _, f := range m.requestStructFields().List { arglist = append(arglist, sel(ssid, f.Names[0])) } return &ast.AssignStmt{ Lhs: resNamesExpr, Tok: token.DEFINE, Rhs: []ast.Expr{ &ast.CallExpr{ Fun: sel(ifc.receiverName(), m.name), Args: arglist, }, }, } } func (m method) wrapResult(results []ast.Expr) ast.Expr { kvs := []ast.Expr{} m.resolveStructNames() for i, a := range m.results { kvs = append(kvs, &ast.KeyValueExpr{ Key: ast.NewIdent(export(a.asField.Name)), Value: results[i], }) } return &ast.CompositeLit{ Type: m.responseStructName(), Elts: kvs, } } func (m method) resolveStructNames() { if m.structsResolved { return } m.structsResolved = true scope := ast.NewScope(nil) for i, p := range m.params { p.asField = p.chooseName(scope) m.params[i] = p } scope = ast.NewScope(nil) for i, r := range m.results { r.asField = r.chooseName(scope) m.results[i] = r } } func (m method) decoderFunc() ast.Decl { fn := fetchFuncDecl("DecodeExampleRequest") fn.Name = m.decodeFuncName() fn = replaceIdent(fn, "ExampleRequest", m.requestStructName()).(*ast.FuncDecl) return fn } func (m method) encoderFunc() ast.Decl { fn := fetchFuncDecl("EncodeExampleResponse") fn.Name = m.encodeFuncName() return fn } func (m method) endpointMakerName() *ast.Ident { return id("Make" + m.name.Name + "Endpoint") } func (m method) requestStruct() ast.Decl { m.resolveStructNames() return structDecl(m.requestStructName(), m.requestStructFields()) } func (m method) responseStruct() ast.Decl { m.resolveStructNames() return structDecl(m.responseStructName(), m.responseStructFields()) } func (m method) hasContext() bool { if len(m.params) < 1 { return false } carg := m.params[0].typ // ugh. 
this is maybe okay for the one-off, but a general case for matching // types would be helpful if sel, is := carg.(*ast.SelectorExpr); is && sel.Sel.Name == "Context" { if id, is := sel.X.(*ast.Ident); is && id.Name == "context" { return true } } return false } func (m method) nonContextParams() []arg { if m.hasContext() { return m.params[1:] } return m.params } func (m method) funcParams(scope *ast.Scope) *ast.FieldList { parms := &ast.FieldList{} if m.hasContext() { parms.List = []*ast.Field{{ Names: []*ast.Ident{ast.NewIdent("ctx")}, Type: sel(id("context"), id("Context")), }} scope.Insert(ast.NewObj(ast.Var, "ctx")) } parms.List = append(parms.List, mappedFieldList(func(a arg) *ast.Field { return a.field(scope) }, m.nonContextParams()...).List...) return parms } func (m method) funcResults() *ast.FieldList { return mappedFieldList(func(a arg) *ast.Field { return a.result() }, m.results...) } func (m method) requestStructName() *ast.Ident { return id(export(m.name.Name) + "Request") } func (m method) requestStructFields() *ast.FieldList { return mappedFieldList(func(a arg) *ast.Field {<|fim▁hole|> return a.exported() }, m.nonContextParams()...) } func (m method) responseStructName() *ast.Ident { return id(export(m.name.Name) + "Response") } func (m method) responseStructFields() *ast.FieldList { return mappedFieldList(func(a arg) *ast.Field { return a.exported() }, m.results...) }<|fim▁end|>
<|file_name|>reservation.js<|end_file_name|><|fim▁begin|>import React, {Component} from 'react'; import {View, Text} from 'react-native'; import {xdateToData} from '../../interface'; import XDate from 'xdate'; import dateutils from '../../dateutils'; import styleConstructor from './style'; class ReservationListItem extends Component { constructor(props) { super(props); this.styles = styleConstructor(props.theme); } shouldComponentUpdate(nextProps) { const r1 = this.props.item; const r2 = nextProps.item; let changed = true; if (!r1 && !r2) { changed = false; } else if (r1 && r2) { if (r1.day.getTime() !== r2.day.getTime()) { changed = true; } else if (!r1.reservation && !r2.reservation) { changed = false; } else if (r1.reservation && r2.reservation) { if ((!r1.date && !r2.date) || (r1.date && r2.date)) { changed = this.props.rowHasChanged(r1.reservation, r2.reservation); } } } return changed; } renderDate(date, item) { if (this.props.renderDay) { return this.props.renderDay(date ? xdateToData(date) : undefined, item); } const today = dateutils.sameDate(date, XDate()) ? this.styles.today : undefined; if (date) { return ( <View style={this.styles.day}> <Text style={[this.styles.dayNum, today]}>{date.getDate()}</Text> <Text style={[this.styles.dayText, today]}>{XDate.locales[XDate.defaultLocale].dayNamesShort[date.getDay()]}</Text> </View> ); } else { return ( <View style={this.styles.day}/> ); } } render() { const {reservation, date} = this.props.item; let content; if (reservation) {<|fim▁hole|> } else { content = this.props.renderEmptyDate(date); } return ( <View style={this.styles.container}> {this.renderDate(date, reservation)} <View style={{flex:1}}> {content} </View> </View> ); } } export default ReservationListItem;<|fim▁end|>
const firstItem = date ? true : false; content = this.props.renderItem(reservation, firstItem);
<|file_name|>GetFormat.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ /* @test @summary Test SoftAudioSynthesizer getFormat method */ import javax.sound.midi.MidiUnavailableException; import javax.sound.midi.Patch; import javax.sound.sampled.*; import com.sun.media.sound.*; public class GetFormat { <|fim▁hole|> { if(!a.equals(b)) throw new RuntimeException("assertEquals fails!"); } private static void assertTrue(boolean value) throws Exception { if(!value) throw new RuntimeException("assertTrue fails!"); } public static void main(String[] args) throws Exception { AudioSynthesizer synth = new SoftSynthesizer(); AudioFormat defformat = synth.getFormat(); assertTrue(defformat != null); synth.openStream(null, null); assertTrue(synth.getFormat().toString().equals(defformat.toString())); synth.close(); AudioFormat custformat = new AudioFormat(8000, 16, 1, true, false); synth.openStream(custformat, null); assertTrue(synth.getFormat().toString().equals(custformat.toString())); synth.close(); } }<|fim▁end|>
private static void assertEquals(Object a, Object b) throws Exception
<|file_name|>PatchFileType.java<|end_file_name|><|fim▁begin|>/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|> */ /* * Created by IntelliJ IDEA. * User: yole * Date: 17.11.2006 * Time: 17:36:42 */ package com.intellij.openapi.vcs.changes.patch; import com.intellij.icons.AllIcons; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.vcs.VcsBundle; import com.intellij.openapi.vfs.VirtualFile; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; public class PatchFileType implements FileType { public static final PatchFileType INSTANCE = new PatchFileType(); public static final String NAME = "PATCH"; @NotNull @NonNls public String getName() { return NAME; } @NotNull public String getDescription() { return VcsBundle.message("patch.file.type.description"); } @NotNull @NonNls public String getDefaultExtension() { return "patch"; } @Nullable public Icon getIcon() { return AllIcons.Nodes.Pointcut; } public boolean isBinary() { return false; } public boolean isReadOnly() { return false; } @Nullable @NonNls public String getCharset(@NotNull VirtualFile file, final byte[] content) { return null; } }<|fim▁end|>
* See the License for the specific language governing permissions and * limitations under the License.
<|file_name|>router.js<|end_file_name|><|fim▁begin|>import React from 'react'; import { applyRouterMiddleware, Router, Route } from 'dva/router'; import { useScroll } from 'react-router-scroll'; import App from '@/app/App';<|fim▁hole|> function RouterConfig({ history }) { return ( <Router history={history} render={applyRouterMiddleware(useScroll())}> <Route path="/" component={App}></Route> </Router> ); } export default RouterConfig;<|fim▁end|>
<|file_name|>test_decorators.py<|end_file_name|><|fim▁begin|>from datetime import datetime, timedelta from django import http from django.conf import settings from django.core.exceptions import PermissionDenied import mock import pytest from olympia.amo.tests import BaseTestCase, TestCase from olympia.amo import decorators, get_user, set_user from olympia.amo.urlresolvers import reverse from olympia.users.models import UserProfile pytestmark = pytest.mark.django_db def test_post_required(): def func(request): return mock.sentinel.response g = decorators.post_required(func) request = mock.Mock() request.method = 'GET' assert isinstance(g(request), http.HttpResponseNotAllowed) request.method = 'POST' assert g(request) == mock.sentinel.response def test_json_view(): """Turns a Python object into a response.""" def func(request): return {'x': 1} response = decorators.json_view(func)(mock.Mock()) assert isinstance(response, http.HttpResponse) assert response.content == '{"x": 1}' assert response['Content-Type'] == 'application/json' assert response.status_code == 200 def test_json_view_normal_response(): """Normal responses get passed through.""" expected = http.HttpResponseForbidden() def func(request): return expected response = decorators.json_view(func)(mock.Mock()) assert expected is response assert response['Content-Type'] == 'text/html; charset=utf-8' def test_json_view_error(): """json_view.error returns 400 responses.""" response = decorators.json_view.error({'msg': 'error'}) assert isinstance(response, http.HttpResponseBadRequest) assert response.content == '{"msg": "error"}' assert response['Content-Type'] == 'application/json' def test_json_view_status(): def func(request): return {'x': 1} response = decorators.json_view(func, status_code=202)(mock.Mock()) assert response.status_code == 202 def test_json_view_response_status(): response = decorators.json_response({'msg': 'error'}, status_code=202) assert response.content == '{"msg": "error"}' assert 
response['Content-Type'] == 'application/json' assert response.status_code == 202 class TestTaskUser(TestCase): fixtures = ['base/users'] def test_set_task_user(self): @decorators.set_task_user def some_func(): return get_user() set_user(UserProfile.objects.get(username='regularuser')) assert get_user().pk == 999 assert some_func().pk == int(settings.TASK_USER_ID) assert get_user().pk == 999 class TestLoginRequired(BaseTestCase): def setUp(self): super(TestLoginRequired, self).setUp() self.f = mock.Mock() self.f.__name__ = 'function' self.request = mock.Mock() self.request.user.is_authenticated.return_value = False self.request.get_full_path.return_value = 'path' def test_normal(self): func = decorators.login_required(self.f) response = func(self.request) assert not self.f.called assert response.status_code == 302 assert response['Location'] == ( '%s?to=%s' % (reverse('users.login'), 'path')) def test_no_redirect(self): func = decorators.login_required(self.f, redirect=False) response = func(self.request) assert not self.f.called assert response.status_code == 401 def test_decorator_syntax(self): # @login_required(redirect=False) func = decorators.login_required(redirect=False)(self.f) response = func(self.request) assert not self.f.called assert response.status_code == 401 def test_no_redirect_success(self): func = decorators.login_required(redirect=False)(self.f) self.request.user.is_authenticated.return_value = True func(self.request) assert self.f.called class TestSetModifiedOn(TestCase): fixtures = ['base/users'] @decorators.set_modified_on def some_method(self, worked): return worked def test_set_modified_on(self): users = list(UserProfile.objects.all()[:3]) self.some_method(True, set_modified_on=users) for user in users: assert UserProfile.objects.get(pk=user.pk).modified.date() == ( datetime.today().date()) <|fim▁hole|> qs.update(modified=yesterday) users = list(qs[:3]) self.some_method(False, set_modified_on=users) for user in users: date = 
UserProfile.objects.get(pk=user.pk).modified.date() assert date < datetime.today().date() class TestPermissionRequired(TestCase): def setUp(self): super(TestPermissionRequired, self).setUp() self.f = mock.Mock() self.f.__name__ = 'function' self.request = mock.Mock() @mock.patch('olympia.access.acl.action_allowed') def test_permission_not_allowed(self, action_allowed): action_allowed.return_value = False func = decorators.permission_required('', '')(self.f) with self.assertRaises(PermissionDenied): func(self.request) @mock.patch('olympia.access.acl.action_allowed') def test_permission_allowed(self, action_allowed): action_allowed.return_value = True func = decorators.permission_required('', '')(self.f) func(self.request) assert self.f.called @mock.patch('olympia.access.acl.action_allowed') def test_permission_allowed_correctly(self, action_allowed): func = decorators.permission_required('Admin', '%')(self.f) func(self.request) action_allowed.assert_called_with(self.request, 'Admin', '%')<|fim▁end|>
def test_not_set_modified_on(self): yesterday = datetime.today() - timedelta(days=1) qs = UserProfile.objects.all()
<|file_name|>OpenHillShade_September_noon_Listener.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
energymodels.OpenHillShade_September_noon_Listener
<|file_name|>run_test.go<|end_file_name|><|fim▁begin|>package integration_test import ( "fmt" "regexp" "runtime" "strings" . "github.com/onsi/ginkgo" "github.com/onsi/ginkgo/types" . "github.com/onsi/gomega" "github.com/onsi/gomega/gbytes" "github.com/onsi/gomega/gexec" ) var _ = Describe("Running Specs", func() { var pathToTest string isWindows := (runtime.GOOS == "windows") denoter := "•" if isWindows { denoter = "+" } Context("when pointed at the current directory", func() { BeforeEach(func() { pathToTest = tmpPath("ginkgo") copyIn("passing_ginkgo_tests", pathToTest) }) It("should run the tests in the working directory", func() { session := startGinkgo(pathToTest, "--noColor") Eventually(session).Should(gexec.Exit(0)) output := string(session.Out.Contents()) Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite")) Ω(output).Should(ContainSubstring(strings.Repeat(denoter, 4))) Ω(output).Should(ContainSubstring("SUCCESS! -- 4 Passed")) Ω(output).Should(ContainSubstring("Test Suite Passed")) }) }) Context("when passed an explicit package to run", func() { BeforeEach(func() { pathToTest = tmpPath("ginkgo") copyIn("passing_ginkgo_tests", pathToTest) }) It("should run the ginkgo style tests", func() { session := startGinkgo(tmpDir, "--noColor", pathToTest) Eventually(session).Should(gexec.Exit(0)) output := string(session.Out.Contents()) Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite")) Ω(output).Should(ContainSubstring(strings.Repeat(denoter, 4))) Ω(output).Should(ContainSubstring("SUCCESS! 
-- 4 Passed")) Ω(output).Should(ContainSubstring("Test Suite Passed")) }) }) Context("when passed a number of packages to run", func() { BeforeEach(func() { pathToTest = tmpPath("ginkgo") otherPathToTest := tmpPath("other") copyIn("passing_ginkgo_tests", pathToTest) copyIn("more_ginkgo_tests", otherPathToTest) }) It("should run the ginkgo style tests", func() { session := startGinkgo(tmpDir, "--noColor", "--succinct=false", "ginkgo", "./other") Eventually(session).Should(gexec.Exit(0)) output := string(session.Out.Contents()) Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite")) Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite")) Ω(output).Should(ContainSubstring("Test Suite Passed")) }) }) Context("when passed a number of packages to run, some of which have focused tests", func() { BeforeEach(func() { pathToTest = tmpPath("ginkgo") otherPathToTest := tmpPath("other") focusedPathToTest := tmpPath("focused") copyIn("passing_ginkgo_tests", pathToTest) copyIn("more_ginkgo_tests", otherPathToTest) copyIn("focused_fixture", focusedPathToTest) }) It("should exit with a status code of 2 and explain why", func() { session := startGinkgo(tmpDir, "--noColor", "--succinct=false", "-r") Eventually(session).Should(gexec.Exit(types.GINKGO_FOCUS_EXIT_CODE)) output := string(session.Out.Contents()) Ω(output).Should(ContainSubstring("Running Suite: Passing_ginkgo_tests Suite")) Ω(output).Should(ContainSubstring("Running Suite: More_ginkgo_tests Suite")) Ω(output).Should(ContainSubstring("Test Suite Passed")) Ω(output).Should(ContainSubstring("Detected Programmatic Focus - setting exit status to %d", types.GINKGO_FOCUS_EXIT_CODE)) }) }) Context("when told to skipPackages", func() { BeforeEach(func() { pathToTest = tmpPath("ginkgo") otherPathToTest := tmpPath("other") focusedPathToTest := tmpPath("focused") copyIn("passing_ginkgo_tests", pathToTest) copyIn("more_ginkgo_tests", otherPathToTest) copyIn("focused_fixture", 
focusedPathToTest) }) It("should skip packages that match the list", func() { session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused", "-r") Eventually(session).Should(gexec.Exit(0)) output := string(session.Out.Contents()) Ω(output).Should(ContainSubstring("Passing_ginkgo_tests Suite")) Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite")) Ω(output).ShouldNot(ContainSubstring("Focused_fixture Suite")) Ω(output).Should(ContainSubstring("Test Suite Passed")) }) Context("when all packages are skipped", func() { It("should not run anything, but still exit 0", func() { session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused,ginkgo", "-r") Eventually(session).Should(gexec.Exit(0)) output := string(session.Out.Contents()) Ω(output).Should(ContainSubstring("All tests skipped!")) Ω(output).ShouldNot(ContainSubstring("Passing_ginkgo_tests Suite")) Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite")) Ω(output).ShouldNot(ContainSubstring("Focused_fixture Suite")) Ω(output).ShouldNot(ContainSubstring("Test Suite Passed")) }) }) }) Context("when there are no tests to run", func() { It("should exit 1", func() { session := startGinkgo(tmpDir, "--noColor", "--skipPackage=other,focused", "-r") Eventually(session).Should(gexec.Exit(1)) output := string(session.Err.Contents()) Ω(output).Should(ContainSubstring("Found no test suites")) }) }) Context("when told to randomizeSuites", func() { BeforeEach(func() { pathToTest = tmpPath("ginkgo") otherPathToTest := tmpPath("other") copyIn("passing_ginkgo_tests", pathToTest) copyIn("more_ginkgo_tests", otherPathToTest) }) It("should skip packages that match the regexp", func() { session := startGinkgo(tmpDir, "--noColor", "--randomizeSuites", "-r", "--seed=2") Eventually(session).Should(gexec.Exit(0)) Ω(session).Should(gbytes.Say("More_ginkgo_tests Suite")) Ω(session).Should(gbytes.Say("Passing_ginkgo_tests Suite")) session = startGinkgo(tmpDir, "--noColor", "--randomizeSuites", "-r", 
"--seed=3") Eventually(session).Should(gexec.Exit(0)) Ω(session).Should(gbytes.Say("Passing_ginkgo_tests Suite")) Ω(session).Should(gbytes.Say("More_ginkgo_tests Suite")) }) }) Context("when pointed at a package with xunit style tests", func() { BeforeEach(func() { pathToTest = tmpPath("xunit") copyIn("xunit_tests", pathToTest) }) It("should run the xunit style tests", func() { session := startGinkgo(pathToTest) Eventually(session).Should(gexec.Exit(0)) output := string(session.Out.Contents()) Ω(output).Should(ContainSubstring("--- PASS: TestAlwaysTrue")) Ω(output).Should(ContainSubstring("Test Suite Passed")) }) }) Context("when pointed at a package with no tests", func() { BeforeEach(func() { pathToTest = tmpPath("no_tests") copyIn("no_tests", pathToTest) }) It("should fail", func() { session := startGinkgo(pathToTest, "--noColor") Eventually(session).Should(gexec.Exit(1)) Ω(session.Err.Contents()).Should(ContainSubstring("Found no test suites")) }) }) Context("when pointed at a package that fails to compile", func() { BeforeEach(func() { pathToTest = tmpPath("does_not_compile") copyIn("does_not_compile", pathToTest) }) It("should fail", func() { session := startGinkgo(pathToTest, "--noColor") Eventually(session).Should(gexec.Exit(1)) output := string(session.Out.Contents()) Ω(output).Should(ContainSubstring("Failed to compile")) }) }) Context("when running in parallel", func() { BeforeEach(func() { pathToTest = tmpPath("ginkgo") copyIn("passing_ginkgo_tests", pathToTest) }) Context("with a specific number of -nodes", func() { It("should use the specified number of nodes", func() { session := startGinkgo(pathToTest, "--noColor", "-succinct", "-nodes=2") Eventually(session).Should(gexec.Exit(0)) output := string(session.Out.Contents()) Ω(output).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs - 2 nodes [%s]{4} SUCCESS! 
\d+(\.\d+)?[muµ]s`, regexp.QuoteMeta(denoter))) Ω(output).Should(ContainSubstring("Test Suite Passed")) }) }) Context("with -p", func() { It("it should autocompute the number of nodes", func() { session := startGinkgo(pathToTest, "--noColor", "-succinct", "-p") Eventually(session).Should(gexec.Exit(0)) output := string(session.Out.Contents()) nodes := runtime.NumCPU() if nodes == 1 { Skip("Can't test parallel testings with 1 CPU") } if nodes > 4 { nodes = nodes - 1 } Ω(output).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs - %d nodes [%s]{4} SUCCESS! \d+(\.\d+)?[muµ]s`, nodes, regexp.QuoteMeta(denoter))) Ω(output).Should(ContainSubstring("Test Suite Passed")) }) }) }) Context("when streaming in parallel", func() { BeforeEach(func() { pathToTest = tmpPath("ginkgo") copyIn("passing_ginkgo_tests", pathToTest) }) It("should print output in realtime", func() { session := startGinkgo(pathToTest, "--noColor", "-stream", "-nodes=2") Eventually(session).Should(gexec.Exit(0)) output := string(session.Out.Contents()) Ω(output).Should(ContainSubstring(`[1] Parallel test node 1/2.`)) Ω(output).Should(ContainSubstring(`[2] Parallel test node 2/2.`)) Ω(output).Should(ContainSubstring(`[1] SUCCESS!`)) Ω(output).Should(ContainSubstring(`[2] SUCCESS!`)) Ω(output).Should(ContainSubstring("Test Suite Passed")) }) }) Context("when running recursively", func() { BeforeEach(func() { passingTest := tmpPath("A") otherPassingTest := tmpPath("E") copyIn("passing_ginkgo_tests", passingTest) copyIn("more_ginkgo_tests", otherPassingTest) }) Context("when all the tests pass", func() { Context("with the -r flag", func() { It("should run all the tests (in succinct mode) and succeed", func() { session := startGinkgo(tmpDir, "--noColor", "-r", ".") Eventually(session).Should(gexec.Exit(0)) output := string(session.Out.Contents()) outputLines := strings.Split(output, "\n") Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs [%s]{4} SUCCESS! 
\d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter))) Ω(outputLines[1]).Should(MatchRegexp(`\[\d+\] More_ginkgo_tests Suite - 2/2 specs [%s]{2} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter))) Ω(output).Should(ContainSubstring("Test Suite Passed")) }) }) Context("with a trailing /...", func() { It("should run all the tests (in succinct mode) and succeed", func() { session := startGinkgo(tmpDir, "--noColor", "./...") Eventually(session).Should(gexec.Exit(0)) output := string(session.Out.Contents()) outputLines := strings.Split(output, "\n") Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs [%s]{4} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter))) Ω(outputLines[1]).Should(MatchRegexp(`\[\d+\] More_ginkgo_tests Suite - 2/2 specs [%s]{2} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter))) Ω(output).Should(ContainSubstring("Test Suite Passed")) }) }) }) Context("when one of the packages has a failing tests", func() { BeforeEach(func() { failingTest := tmpPath("C") copyIn("failing_ginkgo_tests", failingTest) }) It("should fail and stop running tests", func() { session := startGinkgo(tmpDir, "--noColor", "-r") Eventually(session).Should(gexec.Exit(1)) output := string(session.Out.Contents()) outputLines := strings.Split(output, "\n") Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs [%s]{4} SUCCESS! 
\d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter))) Ω(outputLines[1]).Should(MatchRegexp(`\[\d+\] Failing_ginkgo_tests Suite - 2/2 specs`)) Ω(output).Should(ContainSubstring(fmt.Sprintf("%s Failure", denoter))) Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite")) Ω(output).Should(ContainSubstring("Test Suite Failed")) Ω(output).Should(ContainSubstring("Summarizing 1 Failure:")) Ω(output).Should(ContainSubstring("[Fail] FailingGinkgoTests [It] should fail")) }) }) Context("when one of the packages fails to compile", func() { BeforeEach(func() { doesNotCompileTest := tmpPath("C") copyIn("does_not_compile", doesNotCompileTest) }) It("should fail and stop running tests", func() { session := startGinkgo(tmpDir, "--noColor", "-r") Eventually(session).Should(gexec.Exit(1)) output := string(session.Out.Contents()) outputLines := strings.Split(output, "\n") Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs [%s]{4} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter))) Ω(outputLines[1]).Should(ContainSubstring("Failed to compile C:")) Ω(output).ShouldNot(ContainSubstring("More_ginkgo_tests Suite")) Ω(output).Should(ContainSubstring("Test Suite Failed")) }) }) Context("when either is the case, but the keepGoing flag is set", func() { BeforeEach(func() { doesNotCompileTest := tmpPath("B") copyIn("does_not_compile", doesNotCompileTest) failingTest := tmpPath("C") copyIn("failing_ginkgo_tests", failingTest) }) It("should soldier on", func() { session := startGinkgo(tmpDir, "--noColor", "-r", "-keepGoing") Eventually(session).Should(gexec.Exit(1)) output := string(session.Out.Contents()) outputLines := strings.Split(output, "\n") Ω(outputLines[0]).Should(MatchRegexp(`\[\d+\] Passing_ginkgo_tests Suite - 4/4 specs [%s]{4} SUCCESS! 
\d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter))) Ω(outputLines[1]).Should(ContainSubstring("Failed to compile B:")) Ω(output).Should(MatchRegexp(`\[\d+\] Failing_ginkgo_tests Suite - 2/2 specs`)) Ω(output).Should(ContainSubstring(fmt.Sprintf("%s Failure", denoter))) Ω(output).Should(MatchRegexp(`\[\d+\] More_ginkgo_tests Suite - 2/2 specs [%s]{2} SUCCESS! \d+(\.\d+)?[muµ]s PASS`, regexp.QuoteMeta(denoter))) Ω(output).Should(ContainSubstring("Test Suite Failed")) }) }) }) Context("when told to keep going --untilItFails", func() { BeforeEach(func() { copyIn("eventually_failing", tmpDir) }) It("should keep rerunning the tests, until a failure occurs", func() { session := startGinkgo(tmpDir, "--untilItFails", "--noColor") Eventually(session).Should(gexec.Exit(1)) Ω(session).Should(gbytes.Say("This was attempt #1")) Ω(session).Should(gbytes.Say("This was attempt #2")) Ω(session).Should(gbytes.Say("Tests failed on attempt #3")) //it should change the random seed between each test lines := strings.Split(string(session.Out.Contents()), "\n") randomSeeds := []string{} for _, line := range lines {<|fim▁hole|> } Ω(randomSeeds[0]).ShouldNot(Equal(randomSeeds[1])) Ω(randomSeeds[1]).ShouldNot(Equal(randomSeeds[2])) Ω(randomSeeds[0]).ShouldNot(Equal(randomSeeds[2])) }) }) })<|fim▁end|>
if strings.Contains(line, "Random Seed:") { randomSeeds = append(randomSeeds, strings.Split(line, ": ")[1]) }
<|file_name|>poll_token_derive.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Chromium OS Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #![recursion_limit = "128"] extern crate proc_macro; use proc_macro2::{Ident, TokenStream}; use quote::quote; use syn::{parse_macro_input, Data, DeriveInput, Field, Fields, Index, Member, Variant}; #[cfg(test)] mod tests; // The method for packing an enum into a u64 is as follows: // 1) Reserve the lowest "ceil(log_2(x))" bits where x is the number of enum variants. // 2) Store the enum variant's index (0-based index based on order in the enum definition) in // reserved bits. // 3) If there is data in the enum variant, store the data in remaining bits. // The method for unpacking is as follows // 1) Mask the raw token to just the reserved bits // 2) Match the reserved bits to the enum variant token. // 3) If the indicated enum variant had data, extract it from the unreserved bits. // Calculates the number of bits needed to store the variant index. Essentially the log base 2 // of the number of variants, rounded up. fn variant_bits(variants: &[Variant]) -> u32 { if variants.is_empty() { // The degenerate case of no variants. 0 } else { variants.len().next_power_of_two().trailing_zeros() } } // Name of the field if it has one, otherwise 0 assuming this is the zeroth // field of a tuple variant. fn field_member(field: &Field) -> Member { match &field.ident { Some(name) => Member::Named(name.clone()), None => Member::Unnamed(Index::from(0)), } } // Generates the function body for `as_raw_token`. fn generate_as_raw_token(enum_name: &Ident, variants: &[Variant]) -> TokenStream { let variant_bits = variant_bits(variants); // Each iteration corresponds to one variant's match arm. 
let cases = variants.iter().enumerate().map(|(index, variant)| { let variant_name = &variant.ident; let index = index as u64; // The capture string is for everything between the variant identifier and the `=>` in // the match arm: the variant's data capture. let capture = variant.fields.iter().next().map(|field| { let member = field_member(field); quote!({ #member: data }) }); // The modifier string ORs the variant index with extra bits from the variant data // field. let modifier = match variant.fields { Fields::Named(_) | Fields::Unnamed(_) => Some(quote! { | ((data as u64) << #variant_bits) }), Fields::Unit => None, }; // Assembly of the match arm. quote! { #enum_name::#variant_name #capture => #index #modifier } }); quote! { match *self { #( #cases, )* } } }<|fim▁hole|> let variant_bits = variant_bits(variants); let variant_mask = ((1 << variant_bits) - 1) as u64; // Each iteration corresponds to one variant's match arm. let cases = variants.iter().enumerate().map(|(index, variant)| { let variant_name = &variant.ident; let index = index as u64; // The data string is for extracting the enum variant's data bits out of the raw token // data, which includes both variant index and data bits. let data = variant.fields.iter().next().map(|field| { let member = field_member(field); let ty = &field.ty; quote!({ #member: (data >> #variant_bits) as #ty }) }); // Assembly of the match arm. quote! { #index => #enum_name::#variant_name #data } }); quote! { // The match expression only matches the bits for the variant index. match data & #variant_mask { #( #cases, )* _ => unreachable!(), } } } // The proc_macro::TokenStream type can only be constructed from within a // procedural macro, meaning that unit tests are not able to invoke `fn // poll_token` below as an ordinary Rust function. We factor out the logic into // a signature that deals with Syn and proc-macro2 types only which are not // restricted to a procedural macro invocation. 
fn poll_token_inner(input: DeriveInput) -> TokenStream { let variants: Vec<Variant> = match input.data { Data::Enum(data) => data.variants.into_iter().collect(), Data::Struct(_) | Data::Union(_) => panic!("input must be an enum"), }; for variant in &variants { assert!(variant.fields.iter().count() <= 1); } // Given our basic model of a user given enum that is suitable as a token, we generate the // implementation. The implementation is NOT always well formed, such as when a variant's data // type is not bit shiftable or castable to u64, but we let Rust generate such errors as it // would be difficult to detect every kind of error. Importantly, every implementation that we // generate here and goes on to compile succesfully is sound. let enum_name = input.ident; let as_raw_token = generate_as_raw_token(&enum_name, &variants); let from_raw_token = generate_from_raw_token(&enum_name, &variants); quote! { impl PollToken for #enum_name { fn as_raw_token(&self) -> u64 { #as_raw_token } fn from_raw_token(data: u64) -> Self { #from_raw_token } } } } /// Implements the PollToken trait for a given `enum`. /// /// There are limitations on what `enum`s this custom derive will work on: /// /// * Each variant must be a unit variant (no data), or have a single (un)named data field. /// * If a variant has data, it must be a primitive type castable to and from a `u64`. /// * If a variant data has size greater than or equal to a `u64`, its most significant bits must be /// zero. The number of bits truncated is equal to the number of bits used to store the variant /// index plus the number of bits above 64. #[proc_macro_derive(PollToken)] pub fn poll_token(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = parse_macro_input!(input as DeriveInput); poll_token_inner(input).into() }<|fim▁end|>
// Generates the function body for `from_raw_token`. fn generate_from_raw_token(enum_name: &Ident, variants: &[Variant]) -> TokenStream {
<|file_name|>setUpNavigator.js<|end_file_name|><|fim▁begin|>/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @flow strict<|fim▁hole|> * @format */ 'use strict'; const {polyfillObjectProperty} = require('../Utilities/PolyfillFunctions'); let navigator = global.navigator; if (navigator === undefined) { global.navigator = navigator = {}; } // see https://github.com/facebook/react-native/issues/10881 polyfillObjectProperty(navigator, 'product', () => 'ReactNative');<|fim▁end|>
<|file_name|>VersionedHashRevisionTracker.cc<|end_file_name|><|fim▁begin|>// ---------------------------------------------------------------------- // File: VersionedHashRevisionTracker.cc // Author: Georgios Bitzes - CERN // ---------------------------------------------------------------------- /************************************************************************ * quarkdb - a redis-like highly available key-value store * * Copyright (C) 2019 CERN/Switzerland * * * * This program is free software: you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation, either version 3 of the License, or * * (at your option) any later version. * * * * This program is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU General Public License for more details. * * *<|fim▁hole|>#include "VersionedHashRevisionTracker.hh" #include "utils/Macros.hh" #include "Formatter.hh" namespace quarkdb { //------------------------------------------------------------------------------ // Indicate which revision we're referring to. When called multiple times // for the same object, the given value MUST be the same. 
//------------------------------------------------------------------------------ void VersionedHashRevision::setRevisionNumber(uint64_t rev) { if(currentRevision != 0) { qdb_assert(currentRevision == rev); } else { currentRevision = rev; } } //------------------------------------------------------------------------------ // Add to update batch - empty value indicates deletion //------------------------------------------------------------------------------ void VersionedHashRevision::addUpdate(std::string_view field, std::string_view value) { updateBatch.emplace_back(field, value); } //------------------------------------------------------------------------------ // Serialize contents //------------------------------------------------------------------------------ std::string VersionedHashRevision::serialize() const { return Formatter::vhashRevision(currentRevision, updateBatch).val; } //------------------------------------------------------------------------------ // Get revision for a specific key //------------------------------------------------------------------------------ VersionedHashRevision& VersionedHashRevisionTracker::forKey(std::string_view key) { return contents[std::string(key)]; } //------------------------------------------------------------------------------ // Iterate through contents //------------------------------------------------------------------------------ std::map<std::string, VersionedHashRevision>::iterator VersionedHashRevisionTracker::begin() { return contents.begin(); } std::map<std::string, VersionedHashRevision>::iterator VersionedHashRevisionTracker::end() { return contents.end(); } }<|fim▁end|>
* You should have received a copy of the GNU General Public License * * along with this program. If not, see <http://www.gnu.org/licenses/>.* ************************************************************************/