max_stars_repo_path
stringlengths
4
286
max_stars_repo_name
stringlengths
5
119
max_stars_count
int64
0
191k
id
stringlengths
1
7
content
stringlengths
6
1.03M
content_cleaned
stringlengths
6
1.03M
language
stringclasses
111 values
language_score
float64
0.03
1
comments
stringlengths
0
556k
edu_score
float64
0.32
5.03
edu_int_score
int64
0
5
python/GafferUI/ScriptEditor.py
PaulDoessel/gaffer-play
0
9800
########################################################################## # # Copyright (c) 2011-2012, <NAME>. All rights reserved. # Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above # copyright notice, this list of conditions and the following # disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided with # the distribution. # # * Neither the name of <NAME> nor the names of # any other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ########################################################################## import ast import sys import traceback import IECore import Gaffer import GafferUI QtGui = GafferUI._qtImport( "QtGui" ) QtCore = GafferUI._qtImport( "QtCore" ) ## \todo Custom right click menu with script load, save, execute file, undo, redo etc. ## \todo Standard way for users to customise all menus ## \todo Tab completion and popup help. rlcompleter module should be useful for tab completion. Completer( dict ) constructs a completer # that works in a specific namespace. class ScriptEditor( GafferUI.EditorWidget ) : def __init__( self, scriptNode, **kw ) : self.__splittable = GafferUI.SplitContainer() GafferUI.EditorWidget.__init__( self, self.__splittable, scriptNode, **kw ) self.__outputWidget = GafferUI.MultiLineTextWidget( editable = False, wrapMode = GafferUI.MultiLineTextWidget.WrapMode.None, role = GafferUI.MultiLineTextWidget.Role.Code, ) self.__inputWidget = GafferUI.MultiLineTextWidget( wrapMode = GafferUI.MultiLineTextWidget.WrapMode.None, role = GafferUI.MultiLineTextWidget.Role.Code, ) self.__splittable.append( self.__outputWidget ) self.__splittable.append( self.__inputWidget ) self.__inputWidgetActivatedConnection = self.__inputWidget.activatedSignal().connect( Gaffer.WeakMethod( self.__activated ) ) self.__inputWidgetDropTextConnection = self.__inputWidget.dropTextSignal().connect( Gaffer.WeakMethod( self.__dropText ) ) self.__executionDict = { "IECore" : IECore, "Gaffer" : Gaffer, "GafferUI" : GafferUI, "script" : scriptNode, "parent" : scriptNode } def inputWidget( self ) : return self.__inputWidget def execute( self ) : # decide what to execute haveSelection = True toExecute = self.__inputWidget.selectedText() if not toExecute : haveSelection = False toExecute = self.__inputWidget.getText() # parse it first. this lets us give better error formatting # for syntax errors, and also figure out whether we can eval() # and display the result or must exec() only. 
try : parsed = ast.parse( toExecute ) except SyntaxError, e : self.__outputWidget.appendHTML( self.__syntaxErrorToHTML( e ) ) return # execute it self.__outputWidget.appendHTML( self.__codeToHTML( toExecute ) ) with Gaffer.OutputRedirection( stdOut = Gaffer.WeakMethod( self.__redirectOutput ), stdErr = Gaffer.WeakMethod( self.__redirectOutput ) ) : with _MessageHandler( self.__outputWidget ) : with Gaffer.UndoContext( self.scriptNode() ) : with self.getContext() : try : if len( parsed.body ) == 1 and isinstance( parsed.body[0], ast.Expr ) : result = eval( toExecute, self.__executionDict, self.__executionDict ) if result is not None : self.__outputWidget.appendText( str( result ) ) else : exec( toExecute, self.__executionDict, self.__executionDict ) if not haveSelection : self.__inputWidget.setText( "" ) except Exception, e : self.__outputWidget.appendHTML( self.__exceptionToHTML() ) def __repr__( self ) : return "GafferUI.ScriptEditor( scriptNode )" def __activated( self, widget ) : self.execute() return True def __dropText( self, widget, dragData ) : if isinstance( dragData, IECore.StringVectorData ) : return repr( list( dragData ) ) elif isinstance( dragData, Gaffer.GraphComponent ) : if self.scriptNode().isAncestorOf( dragData ) : return "script['" + dragData.relativeName( self.scriptNode() ).replace( ".", "']['" ) + "']" elif isinstance( dragData, Gaffer.Set ) : if len( dragData ) == 1 : return self.__dropText( widget, dragData[0] ) else : return "[ " + ", ".join( [ self.__dropText( widget, d ) for d in dragData ] ) + " ]" elif isinstance( dragData, IECore.Data ) and hasattr( dragData, "value" ) : return repr( dragData.value ) return None def __codeToHTML( self, code ) : code = code.replace( "<", "&lt;" ).replace( ">", "&gt;" ) return "<pre>" + code + "</pre>" def __syntaxErrorToHTML( self, syntaxError ) : formatted = traceback.format_exception_only( SyntaxError, syntaxError ) lineNumber = formatted[0].rpartition( "," )[2].strip() headingText = 
formatted[-1].replace( ":", " : " + lineNumber + " : ", 1 ) result = "<h1 class='ERROR'>%s</h1>" % headingText result += "<br>" + self.__codeToHTML( "".join( formatted[1:-1] ) ) return result def __exceptionToHTML( self ) : t = traceback.extract_tb( sys.exc_info()[2] ) lineNumber = str( t[1][1] ) headingText = traceback.format_exception_only( *(sys.exc_info()[:2]) )[0].replace( ":", " : line " + lineNumber + " : ", 1 ) result = "<h1 class='ERROR'>%s</h1>" % headingText if len( t ) > 2 : result += "<br>" + self.__codeToHTML( "".join( traceback.format_list( t[2:] ) ) ) return result def __redirectOutput( self, output ) : if output != "\n" : self.__outputWidget.appendText( output ) # update the gui so messages are output as they occur, rather than all getting queued # up till the end. QtGui.QApplication.instance().processEvents( QtCore.QEventLoop.ExcludeUserInputEvents ) GafferUI.EditorWidget.registerType( "ScriptEditor", ScriptEditor ) class _MessageHandler( IECore.MessageHandler ) : def __init__( self, textWidget ) : IECore.MessageHandler.__init__( self ) self.__textWidget = textWidget def handle( self, level, context, message ) : html = formatted = "<h1 class='%s'>%s : %s </h1><span class='message'>%s</span><br>" % ( IECore.Msg.levelAsString( level ), IECore.Msg.levelAsString( level ), context, message.replace( "\n", "<br>" ) ) self.__textWidget.appendHTML( html ) # update the gui so messages are output as they occur, rather than all getting queued # up till the end. QtGui.QApplication.instance().processEvents( QtCore.QEventLoop.ExcludeUserInputEvents )
########################################################################## # # Copyright (c) 2011-2012, <NAME>. All rights reserved. # Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above # copyright notice, this list of conditions and the following # disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided with # the distribution. # # * Neither the name of <NAME> nor the names of # any other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ########################################################################## import ast import sys import traceback import IECore import Gaffer import GafferUI QtGui = GafferUI._qtImport( "QtGui" ) QtCore = GafferUI._qtImport( "QtCore" ) ## \todo Custom right click menu with script load, save, execute file, undo, redo etc. ## \todo Standard way for users to customise all menus ## \todo Tab completion and popup help. rlcompleter module should be useful for tab completion. Completer( dict ) constructs a completer # that works in a specific namespace. class ScriptEditor( GafferUI.EditorWidget ) : def __init__( self, scriptNode, **kw ) : self.__splittable = GafferUI.SplitContainer() GafferUI.EditorWidget.__init__( self, self.__splittable, scriptNode, **kw ) self.__outputWidget = GafferUI.MultiLineTextWidget( editable = False, wrapMode = GafferUI.MultiLineTextWidget.WrapMode.None, role = GafferUI.MultiLineTextWidget.Role.Code, ) self.__inputWidget = GafferUI.MultiLineTextWidget( wrapMode = GafferUI.MultiLineTextWidget.WrapMode.None, role = GafferUI.MultiLineTextWidget.Role.Code, ) self.__splittable.append( self.__outputWidget ) self.__splittable.append( self.__inputWidget ) self.__inputWidgetActivatedConnection = self.__inputWidget.activatedSignal().connect( Gaffer.WeakMethod( self.__activated ) ) self.__inputWidgetDropTextConnection = self.__inputWidget.dropTextSignal().connect( Gaffer.WeakMethod( self.__dropText ) ) self.__executionDict = { "IECore" : IECore, "Gaffer" : Gaffer, "GafferUI" : GafferUI, "script" : scriptNode, "parent" : scriptNode } def inputWidget( self ) : return self.__inputWidget def execute( self ) : # decide what to execute haveSelection = True toExecute = self.__inputWidget.selectedText() if not toExecute : haveSelection = False toExecute = self.__inputWidget.getText() # parse it first. this lets us give better error formatting # for syntax errors, and also figure out whether we can eval() # and display the result or must exec() only. 
try : parsed = ast.parse( toExecute ) except SyntaxError, e : self.__outputWidget.appendHTML( self.__syntaxErrorToHTML( e ) ) return # execute it self.__outputWidget.appendHTML( self.__codeToHTML( toExecute ) ) with Gaffer.OutputRedirection( stdOut = Gaffer.WeakMethod( self.__redirectOutput ), stdErr = Gaffer.WeakMethod( self.__redirectOutput ) ) : with _MessageHandler( self.__outputWidget ) : with Gaffer.UndoContext( self.scriptNode() ) : with self.getContext() : try : if len( parsed.body ) == 1 and isinstance( parsed.body[0], ast.Expr ) : result = eval( toExecute, self.__executionDict, self.__executionDict ) if result is not None : self.__outputWidget.appendText( str( result ) ) else : exec( toExecute, self.__executionDict, self.__executionDict ) if not haveSelection : self.__inputWidget.setText( "" ) except Exception, e : self.__outputWidget.appendHTML( self.__exceptionToHTML() ) def __repr__( self ) : return "GafferUI.ScriptEditor( scriptNode )" def __activated( self, widget ) : self.execute() return True def __dropText( self, widget, dragData ) : if isinstance( dragData, IECore.StringVectorData ) : return repr( list( dragData ) ) elif isinstance( dragData, Gaffer.GraphComponent ) : if self.scriptNode().isAncestorOf( dragData ) : return "script['" + dragData.relativeName( self.scriptNode() ).replace( ".", "']['" ) + "']" elif isinstance( dragData, Gaffer.Set ) : if len( dragData ) == 1 : return self.__dropText( widget, dragData[0] ) else : return "[ " + ", ".join( [ self.__dropText( widget, d ) for d in dragData ] ) + " ]" elif isinstance( dragData, IECore.Data ) and hasattr( dragData, "value" ) : return repr( dragData.value ) return None def __codeToHTML( self, code ) : code = code.replace( "<", "&lt;" ).replace( ">", "&gt;" ) return "<pre>" + code + "</pre>" def __syntaxErrorToHTML( self, syntaxError ) : formatted = traceback.format_exception_only( SyntaxError, syntaxError ) lineNumber = formatted[0].rpartition( "," )[2].strip() headingText = 
formatted[-1].replace( ":", " : " + lineNumber + " : ", 1 ) result = "<h1 class='ERROR'>%s</h1>" % headingText result += "<br>" + self.__codeToHTML( "".join( formatted[1:-1] ) ) return result def __exceptionToHTML( self ) : t = traceback.extract_tb( sys.exc_info()[2] ) lineNumber = str( t[1][1] ) headingText = traceback.format_exception_only( *(sys.exc_info()[:2]) )[0].replace( ":", " : line " + lineNumber + " : ", 1 ) result = "<h1 class='ERROR'>%s</h1>" % headingText if len( t ) > 2 : result += "<br>" + self.__codeToHTML( "".join( traceback.format_list( t[2:] ) ) ) return result def __redirectOutput( self, output ) : if output != "\n" : self.__outputWidget.appendText( output ) # update the gui so messages are output as they occur, rather than all getting queued # up till the end. QtGui.QApplication.instance().processEvents( QtCore.QEventLoop.ExcludeUserInputEvents ) GafferUI.EditorWidget.registerType( "ScriptEditor", ScriptEditor ) class _MessageHandler( IECore.MessageHandler ) : def __init__( self, textWidget ) : IECore.MessageHandler.__init__( self ) self.__textWidget = textWidget def handle( self, level, context, message ) : html = formatted = "<h1 class='%s'>%s : %s </h1><span class='message'>%s</span><br>" % ( IECore.Msg.levelAsString( level ), IECore.Msg.levelAsString( level ), context, message.replace( "\n", "<br>" ) ) self.__textWidget.appendHTML( html ) # update the gui so messages are output as they occur, rather than all getting queued # up till the end. QtGui.QApplication.instance().processEvents( QtCore.QEventLoop.ExcludeUserInputEvents )
en
0.691688
########################################################################## # # Copyright (c) 2011-2012, <NAME>. All rights reserved. # Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above # copyright notice, this list of conditions and the following # disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided with # the distribution. # # * Neither the name of <NAME> nor the names of # any other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## ## \todo Custom right click menu with script load, save, execute file, undo, redo etc. ## \todo Standard way for users to customise all menus ## \todo Tab completion and popup help. 
rlcompleter module should be useful for tab completion. Completer( dict ) constructs a completer # that works in a specific namespace. # decide what to execute # parse it first. this lets us give better error formatting # for syntax errors, and also figure out whether we can eval() # and display the result or must exec() only. # execute it # update the gui so messages are output as they occur, rather than all getting queued # up till the end. # update the gui so messages are output as they occur, rather than all getting queued # up till the end.
0.835943
1
03/triangle.py
machinelearningdeveloper/aoc_2016
0
9801
"""Test whether putative triangles, specified as triples of side lengths,
in fact are possible."""


def load_triangles(filename):
    """Load triangles from *filename*, one triangle per non-blank line.

    Each line holds three whitespace-separated integer side lengths;
    returns a list of 3-tuples of ints.
    """
    triangles = []
    with open(filename) as f:
        for line in f:
            if line.strip():
                # Generator argument instead of a throwaway inner list.
                triangles.append(tuple(int(side) for side in line.split()))
    return triangles


def load_triangles_from_cols(filename):
    """Instead of loading one triangle per line, load one-third each of
    three triangles per line (i.e. triangles are read down the columns).

    Returns all first-column triangles, then all second-column, then all
    third-column ones, matching the original ordering.
    """
    xs = []
    ys = []
    zs = []
    with open(filename) as f:
        for line in f:
            if line.strip():
                x, y, z = (int(side) for side in line.split())
                xs.append(x)
                ys.append(y)
                zs.append(z)
    # Group each column's values three at a time into triangles.
    return [(col[i], col[i + 1], col[i + 2])
            for col in (xs, ys, zs)
            for i in range(0, len(col), 3)]


def is_possible(*sides):
    """Return True if the three side lengths can form a triangle.

    The sum of the lengths of every pair of sides must be larger than
    the length of the remaining side.  With the sides sorted it is
    sufficient to check the two smallest against the largest — the other
    two inequalities then hold automatically — which replaces the
    original's brittle index bookkeeping.
    """
    a, b, c = sorted(sides)
    return a + b > c
"""Test whether putative triangles, specified as triples of side lengths,
in fact are possible."""


def load_triangles(filename):
    """Load triangles from filename, one per non-blank line, as 3-tuples."""
    with open(filename) as handle:
        return [tuple(int(token) for token in row.split())
                for row in handle if row.strip()]


def load_triangles_from_cols(filename):
    """Instead of loading one triangle per line, load one-third each of
    three triangles per line (triangles run down the columns)."""
    columns = ([], [], [])
    with open(filename) as handle:
        for row in handle:
            if not row.strip():
                continue
            for column, token in zip(columns, row.split()):
                column.append(int(token))
    triangles = []
    # Emit every completed group of three, column by column, preserving
    # the first/second/third column ordering of the original.
    for column in columns:
        triangles.extend(
            (column[i], column[i + 1], column[i + 2])
            for i in range(0, len(column), 3))
    return triangles


def is_possible(*sides):
    """The sum of the lengths of every pair of sides must be larger than
    the length of the remaining side, or the putative triangle is
    impossible."""
    for first in (0, 1):
        for second in range(first + 1, 3):
            # The one index not chosen as part of the pair.
            remaining = ({0, 1, 2} - {first, second}).pop()
            if sides[first] + sides[second] <= sides[remaining]:
                return False
    return True
en
0.873429
Test whether putative triangles, specified as triples of side lengths, in fact are possible. Load triangles from filename. Instead of loading one triangle per line, load one-third each of three triangles per line. The sum of the lengths of every pair of sides in a, b, c must be larger than the length of the remaining side, or the putative triangle is impossible.
4.221121
4
ExerciciosdePython/ex049.py
aleksandromelo/Exercicios
0
9802
# Multiplication-table printer: read an integer and print its times
# table for multipliers 1 through 10 (prompt text is in Portuguese:
# "enter a number to see its multiplication table").
num = int(input('Digite um número para ver sua tabuada: '))
for i in range(1, 11):
    # {:2} right-aligns the multiplier in two columns so rows line up.
    print('{} x {:2} = {}'.format(num, i, num * i))
# Reads one integer from the user, then prints its multiplication table
# from 1 x n up to 10 x n. The prompt string is Portuguese for
# "Enter a number to see its multiplication table:".
num = int(input('Digite um número para ver sua tabuada: '))
for i in range(1, 11):
    # Width-2 field for the multiplier keeps the '=' signs aligned.
    print('{} x {:2} = {}'.format(num, i, num * i))
none
1
3.998134
4
ebmeta/actions/version.py
bkidwell/ebmeta-old
1
9803
"""Print ebmeta version number."""

import sys

import ebmeta


def run():
    """Print the program name and version, then exit with status 0."""
    # Python 2 print statement; name/version are module-level constants
    # of the ebmeta package.
    print "{} {}".format(ebmeta.PROGRAM_NAME, ebmeta.VERSION)
    # Explicit success exit — this is the whole action.
    sys.exit(0)
"""Print ebmeta version number."""

import sys

import ebmeta


def run():
    """Write "<program> <version>" to stdout and exit successfully."""
    # NOTE: Python 2 print statement — this module targets Python 2.
    print "{} {}".format(ebmeta.PROGRAM_NAME, ebmeta.VERSION)
    sys.exit(0)
en
0.485589
Print ebmeta version number.
1.742575
2
backend/api/tests/mixins/credit_trade_relationship.py
amichard/tfrs
18
9804
<gh_stars>10-100 # -*- coding: utf-8 -*- # pylint: disable=no-member,invalid-name,duplicate-code """ REST API Documentation for the NRS TFRS Credit Trading Application The Transportation Fuels Reporting System is being designed to streamline compliance reporting for transportation fuel suppliers in accordance with the Renewable & Low Carbon Fuel Requirements Regulation. OpenAPI spec version: v1 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import datetime import json import logging from typing import Callable from collections import namedtuple, defaultdict from enum import Enum from api.models.CreditTrade import CreditTrade from api.models.CreditTradeStatus import CreditTradeStatus class CreditTradeRelationshipMixin(object): """ Mixin to provide user mapping for related parties to credit transactions """ class UserRelationship(Enum): """ Enumerates the ways in which a client (user) can be related to a credit trade """ INITIATOR = 1 RESPONDENT = 2 THIRD_PARTY = 3 GOVERNMENT_ANALYST = 4 GOVERNMENT_DIRECTOR = 5 user_map = { UserRelationship.INITIATOR: 'fs_user_1', UserRelationship.RESPONDENT: 'fs_user_2', UserRelationship.THIRD_PARTY: 'fs_user_3', UserRelationship.GOVERNMENT_ANALYST: 'gov_analyst', UserRelationship.GOVERNMENT_DIRECTOR: 'gov_director' } class CreditTradeFlowHooksMixin(object): ChangeRecord = namedtuple('ChangeRecord', [ 'trade_id', 'requesting_username', 'relationship', 'expected_to_be_successful', 'data_before_request', 'data_after_request', 'response_code' ]) PreChangeRecord = 
namedtuple('PreChangeRecord', [ 'trade_id', 'current_status', 'rescinded', 'status_change' ]) StatusChange = namedtuple('StatusChange', [ 'relationship', 'status', 'rescinded' ]) def _sensible_status_changes(self, current_status, rescinded): """ Return a list of valid potential status changes for a given starting state """ status_changes = defaultdict(lambda: []) status_changes[('Draft', False)] = [ self.StatusChange(self.UserRelationship.INITIATOR, 'Submitted', False), self.StatusChange(self.UserRelationship.INITIATOR, 'Cancelled', False) ] status_changes[('Submitted', False)] = [ self.StatusChange(self.UserRelationship.INITIATOR, 'Submitted', True), # rescind self.StatusChange(self.UserRelationship.RESPONDENT, 'Accepted', False), self.StatusChange(self.UserRelationship.RESPONDENT, 'Refused', False) ] status_changes[('Accepted', False)] = [ self.StatusChange(self.UserRelationship.INITIATOR, 'Accepted', True), # rescind self.StatusChange(self.UserRelationship.RESPONDENT, 'Accepted', True), # rescind self.StatusChange(self.UserRelationship.GOVERNMENT_ANALYST, 'Recommended', False), self.StatusChange(self.UserRelationship.GOVERNMENT_ANALYST, 'Not Recommended', False) ] status_changes[('Recommended', False)] = [ self.StatusChange(self.UserRelationship.INITIATOR, 'Recommended', True), # rescind self.StatusChange(self.UserRelationship.RESPONDENT, 'Recommended', True), # rescind self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR, 'Approved', False), self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR, 'Declined', False) ] status_changes[('Not Recommended', False)] = [ self.StatusChange(self.UserRelationship.INITIATOR, 'Not Recommended', True), # rescind self.StatusChange(self.UserRelationship.RESPONDENT, 'Not Recommended', True), # rescind self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR, 'Approved', False), self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR, 'Declined', False) ] return status_changes[(current_status, rescinded)] def 
_path_builder(self, node, path=[], valid_paths=[]): """ Recursively build an array of valid paths through the status tree """ s = self._sensible_status_changes(node.status, node.rescinded) is_leaf = not s path = path + [node] if is_leaf: valid_paths.append(path) # end of the line for branch in s: self._path_builder(branch, path, valid_paths) return valid_paths def check_credit_trade_workflow( self, before_change_callback: Callable[[PreChangeRecord], None] = lambda x: None, after_change_callback: Callable[[ChangeRecord], None] = lambda x: None, path_end_callback: Callable[[], None] = lambda: None, modify_request_payload: Callable[[dict], None] = lambda x: None ): """ Evaluate all normal status paths through the application via REST API as appropriate users with callbacks for tests: before_change_callback called just before a status change. Initial status and trade_id may be None after_change_callback called after a change data_before_request can be None if this was a creation path_end_callback called when this pathway is done (another will begin unless this was the last) """ initiating_org = self.users[ self.user_map[ self.UserRelationship.INITIATOR ]].organization responding_org = self.users[ self.user_map[ self.UserRelationship.RESPONDENT ]].organization payload = { 'fairMarketValuePerCredit': 1, 'initiator': initiating_org.id, 'numberOfCredits': 1, 'respondent': responding_org.id, 'tradeEffectiveDate': datetime.datetime.today().strftime('%Y-%m-%d'), 'type': self.credit_trade_types['sell'].id, 'zeroReason': None } valid_paths = (self._path_builder( self.StatusChange(self.UserRelationship.INITIATOR, 'Draft', False) )) for path in valid_paths: logging.debug('evaluating path: {}'.format( '\n'.join( [ '{} sets status to {} and rescinded to {}'.format( c.relationship, c.status, c.rescinded) for c in path ] ))) trade_id = None response_data = None for node in path: before_change_callback(self.PreChangeRecord( trade_id, CreditTrade.objects.filter( id=trade_id 
).first().status.status if trade_id else None, CreditTrade.objects.filter( id=trade_id ).first().is_rescinded if trade_id else None, node )) payload['status'] = CreditTradeStatus.objects.get_by_natural_key(node.status).id payload['is_rescinded'] = node.rescinded modify_request_payload(payload) if not trade_id: response = self.clients[self.user_map[node.relationship]].post( '/api/credit_trades', content_type='application/json', data=json.dumps(payload) ) else: response = self.clients[self.user_map[node.relationship]].put( '/api/credit_trades/{}'.format(trade_id), content_type='application/json', data=json.dumps(payload) ) previous_response_data = response_data response_data = json.loads(response.content.decode('utf-8')) trade_id = response_data['id'] if 'id' in response_data else trade_id after_change_callback(self.ChangeRecord( trade_id, self.user_map[node.relationship], node.relationship, True, previous_response_data, response_data, response.status_code )) path_end_callback()
# -*- coding: utf-8 -*- # pylint: disable=no-member,invalid-name,duplicate-code """ REST API Documentation for the NRS TFRS Credit Trading Application The Transportation Fuels Reporting System is being designed to streamline compliance reporting for transportation fuel suppliers in accordance with the Renewable & Low Carbon Fuel Requirements Regulation. OpenAPI spec version: v1 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import datetime import json import logging from typing import Callable from collections import namedtuple, defaultdict from enum import Enum from api.models.CreditTrade import CreditTrade from api.models.CreditTradeStatus import CreditTradeStatus class CreditTradeRelationshipMixin(object): """ Mixin to provide user mapping for related parties to credit transactions """ class UserRelationship(Enum): """ Enumerates the ways in which a client (user) can be related to a credit trade """ INITIATOR = 1 RESPONDENT = 2 THIRD_PARTY = 3 GOVERNMENT_ANALYST = 4 GOVERNMENT_DIRECTOR = 5 user_map = { UserRelationship.INITIATOR: 'fs_user_1', UserRelationship.RESPONDENT: 'fs_user_2', UserRelationship.THIRD_PARTY: 'fs_user_3', UserRelationship.GOVERNMENT_ANALYST: 'gov_analyst', UserRelationship.GOVERNMENT_DIRECTOR: 'gov_director' } class CreditTradeFlowHooksMixin(object): ChangeRecord = namedtuple('ChangeRecord', [ 'trade_id', 'requesting_username', 'relationship', 'expected_to_be_successful', 'data_before_request', 'data_after_request', 'response_code' ]) PreChangeRecord = 
namedtuple('PreChangeRecord', [ 'trade_id', 'current_status', 'rescinded', 'status_change' ]) StatusChange = namedtuple('StatusChange', [ 'relationship', 'status', 'rescinded' ]) def _sensible_status_changes(self, current_status, rescinded): """ Return a list of valid potential status changes for a given starting state """ status_changes = defaultdict(lambda: []) status_changes[('Draft', False)] = [ self.StatusChange(self.UserRelationship.INITIATOR, 'Submitted', False), self.StatusChange(self.UserRelationship.INITIATOR, 'Cancelled', False) ] status_changes[('Submitted', False)] = [ self.StatusChange(self.UserRelationship.INITIATOR, 'Submitted', True), # rescind self.StatusChange(self.UserRelationship.RESPONDENT, 'Accepted', False), self.StatusChange(self.UserRelationship.RESPONDENT, 'Refused', False) ] status_changes[('Accepted', False)] = [ self.StatusChange(self.UserRelationship.INITIATOR, 'Accepted', True), # rescind self.StatusChange(self.UserRelationship.RESPONDENT, 'Accepted', True), # rescind self.StatusChange(self.UserRelationship.GOVERNMENT_ANALYST, 'Recommended', False), self.StatusChange(self.UserRelationship.GOVERNMENT_ANALYST, 'Not Recommended', False) ] status_changes[('Recommended', False)] = [ self.StatusChange(self.UserRelationship.INITIATOR, 'Recommended', True), # rescind self.StatusChange(self.UserRelationship.RESPONDENT, 'Recommended', True), # rescind self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR, 'Approved', False), self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR, 'Declined', False) ] status_changes[('Not Recommended', False)] = [ self.StatusChange(self.UserRelationship.INITIATOR, 'Not Recommended', True), # rescind self.StatusChange(self.UserRelationship.RESPONDENT, 'Not Recommended', True), # rescind self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR, 'Approved', False), self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR, 'Declined', False) ] return status_changes[(current_status, rescinded)] def 
_path_builder(self, node, path=[], valid_paths=[]): """ Recursively build an array of valid paths through the status tree """ s = self._sensible_status_changes(node.status, node.rescinded) is_leaf = not s path = path + [node] if is_leaf: valid_paths.append(path) # end of the line for branch in s: self._path_builder(branch, path, valid_paths) return valid_paths def check_credit_trade_workflow( self, before_change_callback: Callable[[PreChangeRecord], None] = lambda x: None, after_change_callback: Callable[[ChangeRecord], None] = lambda x: None, path_end_callback: Callable[[], None] = lambda: None, modify_request_payload: Callable[[dict], None] = lambda x: None ): """ Evaluate all normal status paths through the application via REST API as appropriate users with callbacks for tests: before_change_callback called just before a status change. Initial status and trade_id may be None after_change_callback called after a change data_before_request can be None if this was a creation path_end_callback called when this pathway is done (another will begin unless this was the last) """ initiating_org = self.users[ self.user_map[ self.UserRelationship.INITIATOR ]].organization responding_org = self.users[ self.user_map[ self.UserRelationship.RESPONDENT ]].organization payload = { 'fairMarketValuePerCredit': 1, 'initiator': initiating_org.id, 'numberOfCredits': 1, 'respondent': responding_org.id, 'tradeEffectiveDate': datetime.datetime.today().strftime('%Y-%m-%d'), 'type': self.credit_trade_types['sell'].id, 'zeroReason': None } valid_paths = (self._path_builder( self.StatusChange(self.UserRelationship.INITIATOR, 'Draft', False) )) for path in valid_paths: logging.debug('evaluating path: {}'.format( '\n'.join( [ '{} sets status to {} and rescinded to {}'.format( c.relationship, c.status, c.rescinded) for c in path ] ))) trade_id = None response_data = None for node in path: before_change_callback(self.PreChangeRecord( trade_id, CreditTrade.objects.filter( id=trade_id 
).first().status.status if trade_id else None, CreditTrade.objects.filter( id=trade_id ).first().is_rescinded if trade_id else None, node )) payload['status'] = CreditTradeStatus.objects.get_by_natural_key(node.status).id payload['is_rescinded'] = node.rescinded modify_request_payload(payload) if not trade_id: response = self.clients[self.user_map[node.relationship]].post( '/api/credit_trades', content_type='application/json', data=json.dumps(payload) ) else: response = self.clients[self.user_map[node.relationship]].put( '/api/credit_trades/{}'.format(trade_id), content_type='application/json', data=json.dumps(payload) ) previous_response_data = response_data response_data = json.loads(response.content.decode('utf-8')) trade_id = response_data['id'] if 'id' in response_data else trade_id after_change_callback(self.ChangeRecord( trade_id, self.user_map[node.relationship], node.relationship, True, previous_response_data, response_data, response.status_code )) path_end_callback()
en
0.879166
# -*- coding: utf-8 -*- # pylint: disable=no-member,invalid-name,duplicate-code REST API Documentation for the NRS TFRS Credit Trading Application The Transportation Fuels Reporting System is being designed to streamline compliance reporting for transportation fuel suppliers in accordance with the Renewable & Low Carbon Fuel Requirements Regulation. OpenAPI spec version: v1 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Mixin to provide user mapping for related parties to credit transactions Enumerates the ways in which a client (user) can be related to a credit trade Return a list of valid potential status changes for a given starting state # rescind # rescind # rescind # rescind # rescind # rescind # rescind Recursively build an array of valid paths through the status tree # end of the line Evaluate all normal status paths through the application via REST API as appropriate users with callbacks for tests: before_change_callback called just before a status change. Initial status and trade_id may be None after_change_callback called after a change data_before_request can be None if this was a creation path_end_callback called when this pathway is done (another will begin unless this was the last)
2.202087
2
superset/superset_config.py
panchohumeres/dynamo-covid
4
9805
import os SERVER_NAME = os.getenv('DOMAIN_SUPERSET') PUBLIC_ROLE_LIKE_GAMMA = True SESSION_COOKIE_SAMESITE = None # One of [None, 'Lax', 'Strict'] SESSION_COOKIE_HTTPONLY = False MAPBOX_API_KEY = os.getenv('MAPBOX_API_KEY', '') POSTGRES_DB=os.getenv('POSTGRES_DB') POSTGRES_PASSWORD=os.getenv('POSTGRES_PASSWORD') POSTGRES_USER=os.getenv('POSTGRES_USER') POSTGRES_PORT=str(os.getenv('POSTGRES_PORT')) HTTP_HEADERS = {'X-Frame-Options': 'ALLOWALL'} sql_alchemy_string='postgresql+psycopg2://'+POSTGRES_USER+':'+POSTGRES_PASSWORD+'@postgres:'+POSTGRES_PORT+'/'+POSTGRES_DB CACHE_CONFIG = { 'CACHE_TYPE': 'redis', 'CACHE_DEFAULT_TIMEOUT': 300, 'CACHE_KEY_PREFIX': 'superset_', 'CACHE_REDIS_HOST': 'redis', 'CACHE_REDIS_PORT': 6379, 'CACHE_REDIS_DB': 1, 'CACHE_REDIS_URL': 'redis://redis:6379/1'} SQLALCHEMY_DATABASE_URI = \ sql_alchemy_string SQLALCHEMY_TRACK_MODIFICATIONS = True SECRET_KEY = 'thisISaSECRET_1234'
import os SERVER_NAME = os.getenv('DOMAIN_SUPERSET') PUBLIC_ROLE_LIKE_GAMMA = True SESSION_COOKIE_SAMESITE = None # One of [None, 'Lax', 'Strict'] SESSION_COOKIE_HTTPONLY = False MAPBOX_API_KEY = os.getenv('MAPBOX_API_KEY', '') POSTGRES_DB=os.getenv('POSTGRES_DB') POSTGRES_PASSWORD=os.getenv('POSTGRES_PASSWORD') POSTGRES_USER=os.getenv('POSTGRES_USER') POSTGRES_PORT=str(os.getenv('POSTGRES_PORT')) HTTP_HEADERS = {'X-Frame-Options': 'ALLOWALL'} sql_alchemy_string='postgresql+psycopg2://'+POSTGRES_USER+':'+POSTGRES_PASSWORD+'@postgres:'+POSTGRES_PORT+'/'+POSTGRES_DB CACHE_CONFIG = { 'CACHE_TYPE': 'redis', 'CACHE_DEFAULT_TIMEOUT': 300, 'CACHE_KEY_PREFIX': 'superset_', 'CACHE_REDIS_HOST': 'redis', 'CACHE_REDIS_PORT': 6379, 'CACHE_REDIS_DB': 1, 'CACHE_REDIS_URL': 'redis://redis:6379/1'} SQLALCHEMY_DATABASE_URI = \ sql_alchemy_string SQLALCHEMY_TRACK_MODIFICATIONS = True SECRET_KEY = 'thisISaSECRET_1234'
en
0.895065
# One of [None, 'Lax', 'Strict']
1.877889
2
mybot.py
johnnyboiii3020/matchmaking-bot
0
9806
<reponame>johnnyboiii3020/matchmaking-bot import discord import json import random import os from discord.ext import commands TOKEN = "" client = commands.Bot(command_prefix = '--') os.chdir(r'D:\Programming\Projects\Discord bot\jsonFiles') SoloCounter = 30 SolominCounter = 10 Queueiter = 1 T_Queueiter = 1 TeamCounter = 50 TeamminCounter = 20 extensions = [ "cogs.Matchmaking", "cogs.Moderator" ] @client.event async def on_ready(): botInfo = await client.application_info() oauthlink = discord.utils.oauth_url(botInfo.id) print('---------') print('Username: {}'.format(client.user.name)) print('ID: {}'.format(client.user.id)) print('Server count: {}'.format(str(len(client.servers)))) print('Member count: {}'.format(str(len(set(client.get_all_members()))))) print('OAuth URL: {}'.format(oauthlink)) print('Cogs: {}'.format(client.cogs)) print('---------') ######################### Register Team ################################# @client.command(pass_context = True) @commands.has_role('Registered') async def registerTeam( ctx , teamName , player1: discord.Member , player2: discord.Member , player3: discord.Member , player4: discord.Member , player5: discord.Member): if ctx.message.channel.id == "549911021511245834": with open('Teams.json' , 'r') as f: Teams = json.load(f) players = [player1 , player2 , player3 , player4 , player5] await update_data_Team(ctx , Teams , teamName , players) with open('Teams.json' , 'w') as f: json.dump(Teams , f , indent = 2) async def update_data_Team(ctx , Teams , teamName , players): if not teamName in Teams: Teams[teamName] = {} Teams[teamName]["teamElo"] = 0 Teams[teamName]["Players"] = [] Role = teamName await client.create_role(ctx.message.server , name = Role, hoist = True , mentionable = True ) TeamRole = discord.utils.get(ctx.message.server.roles , name = Role) for player in players: print(player) Teams[teamName]["Players"].append(player.mention) await client.add_roles(player , TeamRole) await client.say("{} is Registered as Team 
Cheers!!!!".format(teamName)) else: await client.say("you are already registered") ############################ Register Solo ################################### @client.command(pass_context = True) async def registersolo( ctx , name: discord.Member): if ctx.message.channel.id == "549911021511245834": with open('Solo.json' , 'r') as f: Solo = json.load(f) await update_data_solo(Solo , name , ctx) with open('Solo.json' , 'w') as f: json.dump(Solo , f , indent = 2) async def update_data_solo( Solo , name , player): if not player.message.author.mention in Solo: author = player.message.author.mention member = player.message.author Solo[author] = {} Solo[author]["name"] = name Solo[author]["Elo"] = 0 nickname = str(Solo[author]["Elo"]) + "~" + Solo[author]["name"] Role = discord.utils.get(player.message.server.roles , name = 'Registered') member.nick = nickname await client.add_roles(member , Role) await client.say("{} is Registered as Solo Cheers Guys!!!!".format(author)) else: await client.say("you are already registered") ############################### Win Team ################################ @client.command(pass_context = True) @commands.has_role('Mod') async def winT(ctx , T_Queueno , Team , Team2): with open('Teams_Queue.json' , 'r') as f: Teams_Queue = json.load(f) with open('Teams.json' , 'r') as f: Teams = json.load(f) Teams[Team]["teamElo"] = Teams[Team]["teamElo"] + TeamCounter Teams[Team2]["teamElo"] = Teams[Team2]["teamElo"] - TeamminCounter await display_win_team(Team , Team2) with open('Teams.json' , 'r') as f: json.dump(Teams , f , indent = 2) ###############CReate Team Queue Channel########################### @client.command(pass_context = True) @commands.has_role('Mod') async def CreateTQueueChannel(ctx): with open('Teams_Queue.json' , 'r') as f: Teams_Queue = json.load(f) Teams_Queue["1"] = [] with open('Teams_Queue.json' , 'w') as f: json.dump(Teams_Queue , f , indent = 2) ########################## Join Team Queue ################### 
@client.command(pass_context = True) @commands.has_role('Registered') async def joinQT(ctx , TeamName): if ctx.message.channel.id == "549910313995206687": with open('Teams.json' , 'r') as f: Teams = json.load(f) if "{}".format(TeamName) in Teams: with open('Teams_Queue.json' , 'r') as f: Teams_Queue = json.load(f) await update_data_Team_Queue(Teams_Queue , TeamName) with open('Teams_Queue.json' , 'w') as f: json.dump(Teams_Queue , f , indent = 2) else: await client.say("{} is not registerd".format(TeamName)) async def update_data_Team_Queue(Teams_Queue , TeamName): global T_Queueiter T_Queueno = T_Queueiter if len(Teams_Queue["{}".format(T_Queueno)]) >= 1: Teams_Queue[str(T_Queueno)].append(TeamName) await display_Team_Queue(T_Queueno , Teams_Queue , TeamName) await display_match(T_Queueno , Teams_Queue) T_Queueiter += 1 T_Queueno = T_Queueiter Teams_Queue[str(T_Queueno)] = [] else: if not TeamName in Teams_Queue[str(T_Queueno)]: Teams_Queue[str(T_Queueno)].append(TeamName) await display_Team_Queue(T_Queueno , Teams_Queue , TeamName) else: await client.say("{} is already in queue" .format(TeamName)) async def display_Team_Queue(T_Queueno , Teams_Queue , TeamName): embed = discord.Embed( title = "Team Queue : {}".format(T_Queueno), description = "5 v 5 Custom Games" ) embed.add_field(name = 'Team:' , value = "\n".join("<@{}>".format(Teams_Queue[T_Queueno])) , inline = False) await client.say(embed = embed) async def display_match(T_Queueno , Teams_Queue): embed = discord.Embed( title= "Team Matchup Queue : {}".format(T_Queueno), description = "5 v 5 Custom Games" ) embed.add_field(name = 'Teams:' , value = "\n".join(Teams_Queue[str(T_Queueno)]) , inline = False) with open('Maps.json' , 'r') as f: Maps = json.load(f) embed.add_field(name = 'Map:' , value = random.choice(Maps["Maps"])) await client.say(embed = embed) ################Show Queue################# @client.command(pass_context = True) @commands.has_role('Registered') async def showQ(ctx , Queueno): if 
ctx.message.channel.id == "549910313995206687": with open('Queue.json' , 'r') as f: Queue = json.load(f) if len(Queue[str(Queueno)]) < 0 : await client.say("Queue is empty") else: await DisplayQueue(Queue , Queueno) ###############Show Team Points########## @client.command(pass_context = True) @commadns.has_role('Registered') async def pointsT(ctx , TeamName): if ctx.message.channel.id == "551095980251021323": with open('Teams.json' , 'r') as f: Teams = json.load(f) if TeamName in Teams: await client.say("{}".format(Teams[TeamName][teamElo])) ####################Show Points ############### @client.command(pass_context = True) @commands.has_role('Registered') async def points(ctx): if ctx.message.channel.id == "551095980251021323": with open('Solo.json' , 'r') as f: Solo = json.load(f) if ctx.message.author.mention in Solo: await client.say("{}".format(Solo[ctx.message.author.mention]["Elo"]) + " points{}".format(ctx.message.author.mention)) ######################### Win Solo ############################## @client.command(pass_context = True) @commands.has_role('Mod' ) async def winS(ctx , Queueno , Teamno , Teamno2): with open('Solo_Teams.json' , 'r') as f: Solo_Teams = json.load(f) with open('Solo.json' , 'r') as f: Solo = json.load(f) await update_winS(Solo_Teams , Solo , Queueno , Teamno , Teamno2) with open('Solo.json' , 'w') as f: json.dump(Solo , f , indent = 2) async def update_winS(Solo_Teams , Solo , Queueno , Teamno , Teamno2): for player in Solo_Teams[str(Queueno)][str(Teamno)]: Solo[player]["Elo"] = Solo[player]["Elo"] + SoloCounter await update_nick(player) for players in Solo_Teams[str(Queueno)][str(Teamno2)]: Solo[players]["Elo"] = Solo[players]["Elo"] - SolominCounter await update_nick(player) await display_updates(Solo_Teams , Teamno , Teamno2 , Queueno) async def update_nick(name): with open('Solo.json' , 'r') as f: Solo = json.load(f) nickname = str(Solo[name]["Elo"]) + "~" + str(Solo[name]["name"]) server = 
client.get_server("549553345044545536") member = server.get_member(name[2:len(name)-1]) member.nick = nickname async def display_updates(Solo_Teams , Teamno , Teamno2 , Queueno): embed = discord.Embed( title = "Updates:" ) embed.add_field(name = 'Winning Team + {}'.format(SoloCounter) , value = '\n'.join(Solo_Teams[str(Queueno)][str(Teamno)])) embed.add_field(name = 'Losing Team - {}'.format(SolominCounter) , value = '\n'.join(Solo_Teams[str(Queueno)][str(Teamno2)])) await client.say(embed = embed) ####Leave Queue ##### @client.command(pass_context = True) @commands.has_role('Registered') async def leaveQ(ctx): with open('Queue.json' , 'r') as f: Queue = json.load(f) await update_data_lQueue(Queue , ctx.message.author) with open('Queue.json' , 'w') as f: json.dump(Queue , f , indent = 2) async def update_data_lQueue( Queue , author): print(Queueiter) if author.mention in Queue[str(Queueiter)]: Queue[str(Queueiter)].remove(author.mention) await client.say("{} has left the queue".format(author.mention)) else: await client.say("{} is not in the queue".format(author.mention)) ###Create Queue Channel #### @client.command(pass_context = True) @commands.has_role('Mod') async def CreateQueueChannel(ctx): with open('Queue.json' , 'r') as f: Queue = json.load(f) Queue[Queueiter] = [] await client.say("Queue Channel is Created") with open('Queue.json' , 'w') as f: json.dump(Queue , f , indent = 2) #############Join Queue######### @client.command(pass_context = True) @commands.has_role('Registered') async def joinQ(ctx): with open('Solo.json' , 'r') as f: Solo = json.load(f) if ctx.message.author.mention in Solo: with open('Queue.json' , 'r') as f: Queue = json.load(f) await update_data_Queue( Queue , ctx.message.author) with open('Queue.json' , 'w') as f: json.dump(Queue , f , indent = 2) else: await client.say("{} is not registered".format(ctx.message.author)) async def update_data_Queue(Queue , author): global Queueiter Queueno = Queueiter if 
len(Queue["{}".format(Queueno)]) >= 9: Queue[str(Queueno)].append(author.mention) await DisplayQueue(Queue , Queueno) await Create_solo_teams(Queue , Queueno) Queueiter = Queueiter + 1 Queueno = Queueiter Queue[str(Queueno)] = [] else: if not author.mention in Queue[str(Queueno)]: Queue[str(Queueno)].append(author.mention) await client.say("{} joined".format(author.mention)) await DisplayQueue( Queue , Queueno) else: await client.say("{} already in queue" .format(author.mention)) async def DisplayQueue( Queue , Queueno): embed = discord.Embed( title = 'Queue:{}'.format(Queueno), description = "5 v 5 Custom Games:" ) embed.add_field(name = "Lobby" , value = '\n'.join(Queue[str(Queueno)]), inline = True) await client.say(embed = embed) async def Create_solo_teams(Queue , Queueno): with open('Solo_Teams.json' , 'r') as f: Solo_Teams = json.load(f) await update_Solo_teams(Solo_Teams , Queueno , Queue) with open('Solo_Teams.json' , 'w') as f: json.dump(Solo_Teams , f , indent = 2) async def update_Solo_teams( Solo_Teams , Queueno , Queue): if not Queueno in Solo_Teams: Solo_Teams[str(Queueno)] = {} Solo_Teams[str(Queueno)]["Team1"] = [] Solo_Teams[str(Queueno)]["Team2"] = [] for x in range(0 , 5): Queuerand = random.choice(Queue[str(Queueno)]) Queue[str(Queueno)].remove(Queuerand) Solo_Teams[str(Queueno)]["Team1"].append(Queuerand) for x in range(0 , 5): Queuerand = random.choice(Queue[str(Queueno)]) Queue[str(Queueno)].remove(Queuerand) Solo_Teams[str(Queueno)]["Team2"].append(Queuerand) await Display_solo_teams(Solo_Teams , Queueno) async def Display_solo_teams( Solo_Teams , Queueno): embed = discord.Embed( title = 'Queueno.:{}'.format(Queueno), description = '5 v 5 Custom Games' ) embed.add_field(name = "Team1:", value = '\n'.join(Solo_Teams[str(Queueno)]["Team1"]) , inline = True) embed.add_field(name = "Team2:", value = '\n'.join(Solo_Teams[str(Queueno)]["Team2"]) , inline = False) with open('Maps.json' , 'r') as f: Maps = json.load(f) embed.add_field(name = 
"Map:", value = random.choice(Maps["Maps"]) , inline = False) embed.add_field(name = "Host of The Match" , value = random.choice(Solo_Teams[str(Queueno)]["Team1"]) , inline = False) await client.say(embed = embed) if __name__ == '__main__': for extension in extensions: try: client.load_extension(extension) except Exception as e: print('Failed to load extension {}\n{}: {}'.format(extension, type(e).__name__, e)) client.run(TOKEN)
import discord import json import random import os from discord.ext import commands TOKEN = "" client = commands.Bot(command_prefix = '--') os.chdir(r'D:\Programming\Projects\Discord bot\jsonFiles') SoloCounter = 30 SolominCounter = 10 Queueiter = 1 T_Queueiter = 1 TeamCounter = 50 TeamminCounter = 20 extensions = [ "cogs.Matchmaking", "cogs.Moderator" ] @client.event async def on_ready(): botInfo = await client.application_info() oauthlink = discord.utils.oauth_url(botInfo.id) print('---------') print('Username: {}'.format(client.user.name)) print('ID: {}'.format(client.user.id)) print('Server count: {}'.format(str(len(client.servers)))) print('Member count: {}'.format(str(len(set(client.get_all_members()))))) print('OAuth URL: {}'.format(oauthlink)) print('Cogs: {}'.format(client.cogs)) print('---------') ######################### Register Team ################################# @client.command(pass_context = True) @commands.has_role('Registered') async def registerTeam( ctx , teamName , player1: discord.Member , player2: discord.Member , player3: discord.Member , player4: discord.Member , player5: discord.Member): if ctx.message.channel.id == "549911021511245834": with open('Teams.json' , 'r') as f: Teams = json.load(f) players = [player1 , player2 , player3 , player4 , player5] await update_data_Team(ctx , Teams , teamName , players) with open('Teams.json' , 'w') as f: json.dump(Teams , f , indent = 2) async def update_data_Team(ctx , Teams , teamName , players): if not teamName in Teams: Teams[teamName] = {} Teams[teamName]["teamElo"] = 0 Teams[teamName]["Players"] = [] Role = teamName await client.create_role(ctx.message.server , name = Role, hoist = True , mentionable = True ) TeamRole = discord.utils.get(ctx.message.server.roles , name = Role) for player in players: print(player) Teams[teamName]["Players"].append(player.mention) await client.add_roles(player , TeamRole) await client.say("{} is Registered as Team Cheers!!!!".format(teamName)) else: await 
client.say("you are already registered") ############################ Register Solo ################################### @client.command(pass_context = True) async def registersolo( ctx , name: discord.Member): if ctx.message.channel.id == "549911021511245834": with open('Solo.json' , 'r') as f: Solo = json.load(f) await update_data_solo(Solo , name , ctx) with open('Solo.json' , 'w') as f: json.dump(Solo , f , indent = 2) async def update_data_solo( Solo , name , player): if not player.message.author.mention in Solo: author = player.message.author.mention member = player.message.author Solo[author] = {} Solo[author]["name"] = name Solo[author]["Elo"] = 0 nickname = str(Solo[author]["Elo"]) + "~" + Solo[author]["name"] Role = discord.utils.get(player.message.server.roles , name = 'Registered') member.nick = nickname await client.add_roles(member , Role) await client.say("{} is Registered as Solo Cheers Guys!!!!".format(author)) else: await client.say("you are already registered") ############################### Win Team ################################ @client.command(pass_context = True) @commands.has_role('Mod') async def winT(ctx , T_Queueno , Team , Team2): with open('Teams_Queue.json' , 'r') as f: Teams_Queue = json.load(f) with open('Teams.json' , 'r') as f: Teams = json.load(f) Teams[Team]["teamElo"] = Teams[Team]["teamElo"] + TeamCounter Teams[Team2]["teamElo"] = Teams[Team2]["teamElo"] - TeamminCounter await display_win_team(Team , Team2) with open('Teams.json' , 'r') as f: json.dump(Teams , f , indent = 2) ###############CReate Team Queue Channel########################### @client.command(pass_context = True) @commands.has_role('Mod') async def CreateTQueueChannel(ctx): with open('Teams_Queue.json' , 'r') as f: Teams_Queue = json.load(f) Teams_Queue["1"] = [] with open('Teams_Queue.json' , 'w') as f: json.dump(Teams_Queue , f , indent = 2) ########################## Join Team Queue ################### @client.command(pass_context = True) 
@commands.has_role('Registered') async def joinQT(ctx , TeamName): if ctx.message.channel.id == "549910313995206687": with open('Teams.json' , 'r') as f: Teams = json.load(f) if "{}".format(TeamName) in Teams: with open('Teams_Queue.json' , 'r') as f: Teams_Queue = json.load(f) await update_data_Team_Queue(Teams_Queue , TeamName) with open('Teams_Queue.json' , 'w') as f: json.dump(Teams_Queue , f , indent = 2) else: await client.say("{} is not registerd".format(TeamName)) async def update_data_Team_Queue(Teams_Queue , TeamName): global T_Queueiter T_Queueno = T_Queueiter if len(Teams_Queue["{}".format(T_Queueno)]) >= 1: Teams_Queue[str(T_Queueno)].append(TeamName) await display_Team_Queue(T_Queueno , Teams_Queue , TeamName) await display_match(T_Queueno , Teams_Queue) T_Queueiter += 1 T_Queueno = T_Queueiter Teams_Queue[str(T_Queueno)] = [] else: if not TeamName in Teams_Queue[str(T_Queueno)]: Teams_Queue[str(T_Queueno)].append(TeamName) await display_Team_Queue(T_Queueno , Teams_Queue , TeamName) else: await client.say("{} is already in queue" .format(TeamName)) async def display_Team_Queue(T_Queueno , Teams_Queue , TeamName): embed = discord.Embed( title = "Team Queue : {}".format(T_Queueno), description = "5 v 5 Custom Games" ) embed.add_field(name = 'Team:' , value = "\n".join("<@{}>".format(Teams_Queue[T_Queueno])) , inline = False) await client.say(embed = embed) async def display_match(T_Queueno , Teams_Queue): embed = discord.Embed( title= "Team Matchup Queue : {}".format(T_Queueno), description = "5 v 5 Custom Games" ) embed.add_field(name = 'Teams:' , value = "\n".join(Teams_Queue[str(T_Queueno)]) , inline = False) with open('Maps.json' , 'r') as f: Maps = json.load(f) embed.add_field(name = 'Map:' , value = random.choice(Maps["Maps"])) await client.say(embed = embed) ################Show Queue################# @client.command(pass_context = True) @commands.has_role('Registered') async def showQ(ctx , Queueno): if ctx.message.channel.id == 
"549910313995206687": with open('Queue.json' , 'r') as f: Queue = json.load(f) if len(Queue[str(Queueno)]) < 0 : await client.say("Queue is empty") else: await DisplayQueue(Queue , Queueno) ###############Show Team Points########## @client.command(pass_context = True) @commadns.has_role('Registered') async def pointsT(ctx , TeamName): if ctx.message.channel.id == "551095980251021323": with open('Teams.json' , 'r') as f: Teams = json.load(f) if TeamName in Teams: await client.say("{}".format(Teams[TeamName][teamElo])) ####################Show Points ############### @client.command(pass_context = True) @commands.has_role('Registered') async def points(ctx): if ctx.message.channel.id == "551095980251021323": with open('Solo.json' , 'r') as f: Solo = json.load(f) if ctx.message.author.mention in Solo: await client.say("{}".format(Solo[ctx.message.author.mention]["Elo"]) + " points{}".format(ctx.message.author.mention)) ######################### Win Solo ############################## @client.command(pass_context = True) @commands.has_role('Mod' ) async def winS(ctx , Queueno , Teamno , Teamno2): with open('Solo_Teams.json' , 'r') as f: Solo_Teams = json.load(f) with open('Solo.json' , 'r') as f: Solo = json.load(f) await update_winS(Solo_Teams , Solo , Queueno , Teamno , Teamno2) with open('Solo.json' , 'w') as f: json.dump(Solo , f , indent = 2) async def update_winS(Solo_Teams , Solo , Queueno , Teamno , Teamno2): for player in Solo_Teams[str(Queueno)][str(Teamno)]: Solo[player]["Elo"] = Solo[player]["Elo"] + SoloCounter await update_nick(player) for players in Solo_Teams[str(Queueno)][str(Teamno2)]: Solo[players]["Elo"] = Solo[players]["Elo"] - SolominCounter await update_nick(player) await display_updates(Solo_Teams , Teamno , Teamno2 , Queueno) async def update_nick(name): with open('Solo.json' , 'r') as f: Solo = json.load(f) nickname = str(Solo[name]["Elo"]) + "~" + str(Solo[name]["name"]) server = client.get_server("549553345044545536") member = 
server.get_member(name[2:len(name)-1]) member.nick = nickname async def display_updates(Solo_Teams , Teamno , Teamno2 , Queueno): embed = discord.Embed( title = "Updates:" ) embed.add_field(name = 'Winning Team + {}'.format(SoloCounter) , value = '\n'.join(Solo_Teams[str(Queueno)][str(Teamno)])) embed.add_field(name = 'Losing Team - {}'.format(SolominCounter) , value = '\n'.join(Solo_Teams[str(Queueno)][str(Teamno2)])) await client.say(embed = embed) ####Leave Queue ##### @client.command(pass_context = True) @commands.has_role('Registered') async def leaveQ(ctx): with open('Queue.json' , 'r') as f: Queue = json.load(f) await update_data_lQueue(Queue , ctx.message.author) with open('Queue.json' , 'w') as f: json.dump(Queue , f , indent = 2) async def update_data_lQueue( Queue , author): print(Queueiter) if author.mention in Queue[str(Queueiter)]: Queue[str(Queueiter)].remove(author.mention) await client.say("{} has left the queue".format(author.mention)) else: await client.say("{} is not in the queue".format(author.mention)) ###Create Queue Channel #### @client.command(pass_context = True) @commands.has_role('Mod') async def CreateQueueChannel(ctx): with open('Queue.json' , 'r') as f: Queue = json.load(f) Queue[Queueiter] = [] await client.say("Queue Channel is Created") with open('Queue.json' , 'w') as f: json.dump(Queue , f , indent = 2) #############Join Queue######### @client.command(pass_context = True) @commands.has_role('Registered') async def joinQ(ctx): with open('Solo.json' , 'r') as f: Solo = json.load(f) if ctx.message.author.mention in Solo: with open('Queue.json' , 'r') as f: Queue = json.load(f) await update_data_Queue( Queue , ctx.message.author) with open('Queue.json' , 'w') as f: json.dump(Queue , f , indent = 2) else: await client.say("{} is not registered".format(ctx.message.author)) async def update_data_Queue(Queue , author): global Queueiter Queueno = Queueiter if len(Queue["{}".format(Queueno)]) >= 9: 
Queue[str(Queueno)].append(author.mention) await DisplayQueue(Queue , Queueno) await Create_solo_teams(Queue , Queueno) Queueiter = Queueiter + 1 Queueno = Queueiter Queue[str(Queueno)] = [] else: if not author.mention in Queue[str(Queueno)]: Queue[str(Queueno)].append(author.mention) await client.say("{} joined".format(author.mention)) await DisplayQueue( Queue , Queueno) else: await client.say("{} already in queue" .format(author.mention)) async def DisplayQueue( Queue , Queueno): embed = discord.Embed( title = 'Queue:{}'.format(Queueno), description = "5 v 5 Custom Games:" ) embed.add_field(name = "Lobby" , value = '\n'.join(Queue[str(Queueno)]), inline = True) await client.say(embed = embed) async def Create_solo_teams(Queue , Queueno): with open('Solo_Teams.json' , 'r') as f: Solo_Teams = json.load(f) await update_Solo_teams(Solo_Teams , Queueno , Queue) with open('Solo_Teams.json' , 'w') as f: json.dump(Solo_Teams , f , indent = 2) async def update_Solo_teams( Solo_Teams , Queueno , Queue): if not Queueno in Solo_Teams: Solo_Teams[str(Queueno)] = {} Solo_Teams[str(Queueno)]["Team1"] = [] Solo_Teams[str(Queueno)]["Team2"] = [] for x in range(0 , 5): Queuerand = random.choice(Queue[str(Queueno)]) Queue[str(Queueno)].remove(Queuerand) Solo_Teams[str(Queueno)]["Team1"].append(Queuerand) for x in range(0 , 5): Queuerand = random.choice(Queue[str(Queueno)]) Queue[str(Queueno)].remove(Queuerand) Solo_Teams[str(Queueno)]["Team2"].append(Queuerand) await Display_solo_teams(Solo_Teams , Queueno) async def Display_solo_teams( Solo_Teams , Queueno): embed = discord.Embed( title = 'Queueno.:{}'.format(Queueno), description = '5 v 5 Custom Games' ) embed.add_field(name = "Team1:", value = '\n'.join(Solo_Teams[str(Queueno)]["Team1"]) , inline = True) embed.add_field(name = "Team2:", value = '\n'.join(Solo_Teams[str(Queueno)]["Team2"]) , inline = False) with open('Maps.json' , 'r') as f: Maps = json.load(f) embed.add_field(name = "Map:", value = random.choice(Maps["Maps"]) , 
inline = False) embed.add_field(name = "Host of The Match" , value = random.choice(Solo_Teams[str(Queueno)]["Team1"]) , inline = False) await client.say(embed = embed) if __name__ == '__main__': for extension in extensions: try: client.load_extension(extension) except Exception as e: print('Failed to load extension {}\n{}: {}'.format(extension, type(e).__name__, e)) client.run(TOKEN)
de
0.796602
######################### Register Team ################################# ############################ Register Solo ################################### ############################### Win Team ################################ ###############CReate Team Queue Channel########################### ########################## Join Team Queue ################### ################Show Queue################# ###############Show Team Points########## ####################Show Points ############### ######################### Win Solo ############################## ####Leave Queue ##### ###Create Queue Channel #### #############Join Queue#########
2.628248
3
conversation.py
markemus/economy
2
9807
import database as d import numpy as np import random from transitions import Machine #Conversations are markov chains. Works as follows: a column vector for each CURRENT state j, a row vector for each TARGET state i. #Each entry i,j = the probability of moving to state i from state j. #target state D = end of conversation. We start in state D when initializing conversation. #row vectors sum to 1, internal lists are columns. #Conversation is a singleton. DO NOT CREATE NEW CONVERSATION OBJECTS. class Conversation(object): #a. stores, b.manufacturers, c.friends, d. myself, e.end conversation topicMatrix = [ [0.00,0.20,0.15,0.15,0.25], [0.20,0.00,0.15,0.15,0.25], [0.15,0.15,0.00,0.20,0.25], [0.15,0.15,0.20,0.00,0.25], [0.50,0.50,0.50,0.50,0.00] ] #a. different store, b. new topic, c. end convo, d. prices storeMatrix = [ [0.0,0.0,0.25,0.25], [0.0,0.0,0.25,0.25], [0.0,0.0,0.25,0.50], [1.0,1.0,0.25,0.00] ] #a. different manufacturer, b. new topic, c. end convo, d. prices manuMatrix = [ [0.0,0.0,0.25,0.25], [0.0,0.0,0.25,0.25], [0.0,0.0,0.25,0.50], [1.0,1.0,0.25,0.00] ] #a. different friend, b. new topic, c. end convo, d. family, e. job, /f. skills friendMatrix = [ [0.0,0.0,0.2,0.1,0.1], [0.0,0.0,0.2,0.2,0.2], [0.0,0.0,0.2,0.5,0.5], [0.5,0.5,0.2,0.0,0.2], [0.5,0.5,0.2,0.2,0.0] ] # friendMatrix = [ # [0.00,0.00,0.15,0.1,0.1,0.1], # [0.00,0.00,0.15,0.2,0.2,0.2], # [0.00,0.00,0.15,0.5,0.5,0.5], # [0.34,0.34,0.15,0.0,0.1,0.1], # [0.33,0.33,0.15,0.1,0.0,0.1], # [0.33,0.33,0.25,0.1,0.1,0.0] # ] #a. introduction, b. new topic, c. end convo, d. myfamily, e. myjob, /f. 
myskills myselfMatrix = [ [0.00,1,0.2,0.0,0.0], [0.25,0,0.2,0.2,0.2], [0.25,0,0.2,0.5,0.5], [0.25,0,0.2,0.0,0.3], [0.25,0,0.2,0.3,0.0] ] # myselfMatrix = [ # [0.0,1,0.15,0.00,0.00,0.00], # [0.2,0,0.15,0.20,0.20,0.20], # [0.2,0,0.15,0.50,0.50,0.50], # [0.2,0,0.15,0.00,0.15,0.15], # [0.2,0,0.15,0.15,0.00,0.15], # [0.2,0,0.15,0.15,0.15,0.00] # ] states = ['topic','store','manu','friend', 'myself', 'exit'] transitions = [ {'trigger' : 'toTopic', 'source' : '*', 'dest' : 'topic'}, {'trigger' : 'toStore', 'source' : 'topic', 'dest' : 'store'}, {'trigger' : 'toManu' , 'source' : 'topic', 'dest' : 'manu' }, {'trigger' : 'toFriend', 'source' : 'topic', 'dest' : 'friend' }, {'trigger' : 'toMyself', 'source' : 'topic', 'dest' : 'myself'}, {'trigger' : 'toExit', 'source' : '*', 'dest' : 'exit'} ] def __init__(self): self.isPlayer = False self.firstPerson = None self.secondPerson = None self.target = None self.machine = Machine(model=self, states=Conversation.states, transitions=Conversation.transitions, initial='exit') self.menuDict = { 'topic' : [self.toStore, self.toManu, self.toFriend, self.toMyself, self.toExit], 'store' : [self.different, self.toTopic, self.toExit, self.prices], 'manu' : [self.different, self.toTopic, self.toExit, self.prices], 'friend' : [self.different, self.toTopic, self.toExit, self.family, self.job], 'myself' : [self.introduction, self.toTopic, self.toExit, self.myfamily, self.myjob] } self.machine.on_enter_topic('topicHandler') self.machine.on_enter_store('storeHandler') self.machine.on_enter_manu('manuHandler') self.machine.on_enter_friend('friendHandler') self.machine.on_enter_myself('myselfHandler') self.machine.on_enter_exit('exitHandler') def beginConversation(self, firstPerson, secondPerson, isPlayer=False): self.isPlayer = isPlayer self.firstPerson = firstPerson self.secondPerson = secondPerson self.introduction() self.toTopic() def introduction(self): p2 = self.firstPerson.peopleManager(self.secondPerson) p1 = 
self.secondPerson.peopleManager(self.firstPerson) p2.name = self.secondPerson.name p1.name = self.firstPerson.name p2.updateOpinion(1) p1.updateOpinion(1) def different(self): if self.state == 'friend': testTarget = self.firstPerson.randomPerson(self.target) if testTarget is not None: self.target = testTarget.person else: self.target = None elif self.state == 'manu': testTarget = self.firstPerson.randomManu(self.target) if testTarget is not None: self.target = testTarget.store else: self.target = None elif self.state == 'store': testTarget = self.firstPerson.randomStore(self.target) if testTarget is not None: self.target = testTarget.store else: self.target = None def prices(self): if self.target is not None: firstProfile = self.firstPerson.unitManager(self.target, self.secondPerson) secondProfile = self.secondPerson.unitManager(self.target, self.firstPerson) firstPrices = firstProfile.getPricesWithDayNum() secondPrices = secondProfile.getPricesWithDayNum() firstDayNum = firstPrices[1] secondDayNum = secondPrices[1] if firstDayNum > secondDayNum: prices = firstPrices[0] secondProfile.updatePrices(prices, firstDayNum) #thoughts self.firstPerson.think("I told " + self.secondPerson.name + " about the prices at " + self.target.name + ".") self.secondPerson.think(self.firstPerson.name + " told me about the prices at " + self.target.name + ".") elif secondDayNum > firstDayNum: prices = secondPrices[0] firstProfile.updatePrices(prices, secondDayNum) #thoughts self.firstPerson.think(self.secondPerson.name + " told me about the prices at " + self.target.name + ".") self.secondPerson.think("I told " + self.firstPerson.name + " about the prices at " + self.target.name + ".") else: self.firstPerson.think(self.secondPerson.name + " and I talked about " + self.target.name + "'s prices.") self.secondPerson.think(self.firstPerson.name + " and I talked about " + self.target.name + "'s prices.") else: if self.state == 'store': self.firstPerson.think(self.secondPerson.name + " 
listened to me gripe about how I can't find anywhere to shop.") self.secondPerson.think(self.firstPerson.name + " told me that they can't find anywhere to shop.") elif self.state == 'manu': self.firstPerson.think("I mentioned to " + self.secondPerson.name + " that I don't know anything about the local industry.") self.secondPerson.think(self.firstPerson.name + " told me that they don't know much about the local industry.") else: self.firstPerson.think("There is a bug in conversation.prices. (not manu or store)") self.secondPerson.think("There is a bug in conversation.prices. (not manu or store)") def family(self): if self.target is not None: #info: family, people #profiles p1 = self.firstPerson.peopleManager(self.target) p2 = self.secondPerson.peopleManager(self.target) #variables f1 = p1.getFamily() f2 = p2.getFamily() ff = [] #update profiles for a, b in zip(f1, f2): if a[-1] >= b[-1]: ff.append(a) else: ff.append(b) p1.updateFamily(*ff) p2.updateFamily(*ff) #thoughts self.firstPerson.think(self.secondPerson.name + " and I gossipped about " + self.target.name + "'s family.") self.secondPerson.think(self.firstPerson.name + " and I gossipped about " + self.target.name + "'s family.") else: self.firstPerson.think("I don't really know anything about my friends' families.") self.secondPerson.think("I don't really know anything about my friends' families.") def job(self): if self.target is not None: #profiles firstProfile = self.firstPerson.peopleManager(self.target) secondProfile = self.secondPerson.peopleManager(self.target) #variables firstJob = firstProfile.getJob() secondJob = secondProfile.getJob() #update profiles if firstJob[1] > secondJob[1]: secondProfile.updateJob(*firstJob) self.firstPerson.think("I told " + self.secondPerson.name + " what " + self.target.name + " does for a living.") self.secondPerson.think(self.firstPerson.name + " told me what " + self.target.name + " does for a living.") elif secondJob[1] > firstJob[1]: 
firstProfile.updateJob(*secondJob) self.firstPerson.think(self.secondPerson.name + " told me what " + self.target.name + " does for a living.") self.secondPerson.think("I told " + self.firstPerson.name + " about " + self.target.name + " does for a living.") else: self.firstPerson.think(self.secondPerson.name + " and I talked about " + self.target.name + "'s job.") self.secondPerson.think(self.firstPerson.name + " and I talked about " + self.target.name + "'s job.") else: self.firstPerson.think("I don't know what any of my friends do for a living!") self.secondPerson.think("I don't know what any of my friends do for a living!") # def skills(self): # #info: skills # if self.target is not None: # #profiles # firstProfile = self.firstPerson.peopleManager(self.target) # secondProfile = self.secondPerson.peopleManager(self.target) # #variables # firstSkills = firstProfile.getSkills() # secondSkills = secondProfile.getSkills() # #update profiles # if firstSkills[1] > secondSkills[1]: # secondProfile.updateSkills(*firstSkills) # self.firstPerson.think("I told " + self.secondPerson.name + " about how good " + self.target.name + " is with their hands.") # self.secondPerson.think(self.firstPerson.name + " told me about how good " + self.target.name + " is with their hands.") # elif secondSkills[1] > firstSkills[1]: # firstProfile.updateSkills(*secondSkills) # self.firstPerson.think(self.secondPerson.name + " told me about how good " + self.target.name + " is with their hands.") # self.secondPerson.think("I told " + self.firstPerson.name + " about how good " + self.target.name + " is with their hands.") # else: # self.firstPerson.think(self.secondPerson.name + " and I talked about how good " + self.target.name + " is with their hands.") # self.secondPerson.think(self.firstPerson.name + " and I talked about how good " + self.target.name + " is with their hands.") # else: # self.firstPerson.think("I should spend more time doing things with my friends.") # 
self.secondPerson.think("I should spend more time doing things with my friends.") def myfamily(self): #info: family, people #profiles firstProfile = self.secondPerson.peopleManager(self.firstPerson) secondProfile = self.firstPerson.peopleManager(self.secondPerson) firstOwn = self.firstPerson.peopleManager(self.firstPerson) secondOwn = self.secondPerson.peopleManager(self.secondPerson) #update profiles firstProfile.updateFamily(firstOwn.getFather(), firstOwn.getMother(), firstOwn.getSpouse(), firstOwn.getSiblings(), firstOwn.getChildren()) secondProfile.updateFamily(secondOwn.getFather(), secondOwn.getMother(), secondOwn.getSpouse(), secondOwn.getSiblings(), secondOwn.getChildren()) #thoughts self.firstPerson.think(self.secondPerson.name + " caught me up on their family life.") self.secondPerson.think(self.firstPerson.name + " caught me up on their family life.") def myjob(self): #info: jobs, jobUnits, *salaries #profiles firstProfile = self.secondPerson.peopleManager(self.firstPerson) secondProfile = self.firstPerson.peopleManager(self.secondPerson) #variables firstJob = self.firstPerson.getJob() secondJob = self.secondPerson.getJob() dayNum = self.firstPerson.model.getDayNum() try: firstJobType = firstJob.getJobType() firstJobUnit = firstJob.getUnit() firstJobLoc = firstJobUnit.getName() firstSalary = firstJob.getSalary() except: firstJobType = "Jobhunter" firstJobUnit = None firstJobLoc = "home" firstSalary = 0 try: secondJobType = secondJob.getJobType() secondJobUnit = secondJob.getUnit() secondJobLoc = secondJobUnit.getName() secondSalary = secondJob.getSalary() except: secondJobType = "Jobhunter" secondJobUnit = None secondJobLoc = "home" secondSalary = 0 #update profiles if dayNum > firstProfile.getJob()[1]: firstProfile.updateJob(firstJob, dayNum) if dayNum > firstProfile.getSalary()[1]: firstProfile.updateSalary(firstSalary, dayNum) if dayNum > secondProfile.getJob()[1]: secondProfile.updateJob(secondJob, dayNum) if dayNum > secondProfile.getSalary()[1]: 
secondProfile.updateSalary(firstSalary, dayNum) if firstJobUnit is not None: self.secondPerson.unitManager(firstJobUnit, self.firstPerson) if secondJobUnit is not None: self.firstPerson.unitManager(secondJobUnit, self.secondPerson) #thoughts self.firstPerson.think(self.secondPerson.name + " told me about their job as a " + secondJobType + " at " + secondJobLoc + ".") self.secondPerson.think(self.firstPerson.name + " told me about their job as a " + firstJobType + " at " + firstJobLoc + ".") # def myskills(self): # #info skills # #profiles # firstProfile = self.secondPerson.peopleManager(self.firstPerson) # secondProfile = self.firstPerson.peopleManager(self.secondPerson) # #variables # firstSkills = self.firstPerson.getSkills() # secondSkills = self.secondPerson.getSkills() # dayNum = self.firstPerson.model.getDayNum() # #update profiles # if dayNum > firstProfile.getSkills()[1]: # firstProfile.updateSkills(firstSkills, dayNum) # if dayNum > secondProfile.getSkills()[1]: # secondProfile.updateSkills(secondSkills, dayNum) # #thoughts # self.firstPerson.think(self.secondPerson.name + " and I talked shop for a while.") # self.secondPerson.think(self.firstPerson.name + " and I talked shop for a while.") #dialogues are chosen here, but the actual method call is in the handler (eg prices) def talk(self, matrix, stateVector): if self.isPlayer: # stateVector = playerChoice pass else: #get dialogue probabilities given last dialogue probArray = np.dot(matrix, stateVector) prob = probArray.tolist() #choose dialogue choice = random.random() stateVector = [0 for i in range(len(prob))] for i in range(len(prob)): outcome = prob[i] if outcome >= choice: stateVector[i] = 1 return stateVector else: choice = choice - outcome def topicHandler(self): matrix = Conversation.topicMatrix stateVector = [0,0,0,0,1] # self.firstPerson.think("topicHandler") stateVector = self.talk(matrix, stateVector) for i in range(len(stateVector)): if stateVector[i] == 1: self.menuDict[self.state][i]() 
break def storeHandler(self): matrix = Conversation.storeMatrix stateVector = [0,1,0,0] # self.firstPerson.think("storeHandler") self.different() while self.state == 'store': stateVector = self.talk(matrix, stateVector) for i in range(len(stateVector)): if stateVector[i] == 1: self.menuDict[self.state][i]() break def manuHandler(self): matrix = Conversation.manuMatrix stateVector = [0,1,0,0] # self.firstPerson.think("manuHandler") self.different() while self.state == 'manu': stateVector = self.talk(matrix, stateVector) for i in range(len(stateVector)): if stateVector[i] == 1: self.menuDict[self.state][i]() break def friendHandler(self): matrix = Conversation.friendMatrix stateVector = [0,1,0,0,0] # self.firstPerson.think("friendHandler") self.different() while self.state == 'friend': stateVector = self.talk(matrix, stateVector) for i in range(len(stateVector)): if stateVector[i] == 1: self.menuDict[self.state][i]() break def myselfHandler(self): matrix = Conversation.myselfMatrix stateVector = [0,1,0,0,0] # self.firstPerson.think("myselfHandler") while self.state == 'myself': stateVector = self.talk(matrix, stateVector) for i in range(len(stateVector)): if stateVector[i] == 1: self.menuDict[self.state][i]() break def exitHandler(self): self.isPlayer = False Convo = Conversation()
import database as d import numpy as np import random from transitions import Machine #Conversations are markov chains. Works as follows: a column vector for each CURRENT state j, a row vector for each TARGET state i. #Each entry i,j = the probability of moving to state i from state j. #target state D = end of conversation. We start in state D when initializing conversation. #row vectors sum to 1, internal lists are columns. #Conversation is a singleton. DO NOT CREATE NEW CONVERSATION OBJECTS. class Conversation(object): #a. stores, b.manufacturers, c.friends, d. myself, e.end conversation topicMatrix = [ [0.00,0.20,0.15,0.15,0.25], [0.20,0.00,0.15,0.15,0.25], [0.15,0.15,0.00,0.20,0.25], [0.15,0.15,0.20,0.00,0.25], [0.50,0.50,0.50,0.50,0.00] ] #a. different store, b. new topic, c. end convo, d. prices storeMatrix = [ [0.0,0.0,0.25,0.25], [0.0,0.0,0.25,0.25], [0.0,0.0,0.25,0.50], [1.0,1.0,0.25,0.00] ] #a. different manufacturer, b. new topic, c. end convo, d. prices manuMatrix = [ [0.0,0.0,0.25,0.25], [0.0,0.0,0.25,0.25], [0.0,0.0,0.25,0.50], [1.0,1.0,0.25,0.00] ] #a. different friend, b. new topic, c. end convo, d. family, e. job, /f. skills friendMatrix = [ [0.0,0.0,0.2,0.1,0.1], [0.0,0.0,0.2,0.2,0.2], [0.0,0.0,0.2,0.5,0.5], [0.5,0.5,0.2,0.0,0.2], [0.5,0.5,0.2,0.2,0.0] ] # friendMatrix = [ # [0.00,0.00,0.15,0.1,0.1,0.1], # [0.00,0.00,0.15,0.2,0.2,0.2], # [0.00,0.00,0.15,0.5,0.5,0.5], # [0.34,0.34,0.15,0.0,0.1,0.1], # [0.33,0.33,0.15,0.1,0.0,0.1], # [0.33,0.33,0.25,0.1,0.1,0.0] # ] #a. introduction, b. new topic, c. end convo, d. myfamily, e. myjob, /f. 
myskills myselfMatrix = [ [0.00,1,0.2,0.0,0.0], [0.25,0,0.2,0.2,0.2], [0.25,0,0.2,0.5,0.5], [0.25,0,0.2,0.0,0.3], [0.25,0,0.2,0.3,0.0] ] # myselfMatrix = [ # [0.0,1,0.15,0.00,0.00,0.00], # [0.2,0,0.15,0.20,0.20,0.20], # [0.2,0,0.15,0.50,0.50,0.50], # [0.2,0,0.15,0.00,0.15,0.15], # [0.2,0,0.15,0.15,0.00,0.15], # [0.2,0,0.15,0.15,0.15,0.00] # ] states = ['topic','store','manu','friend', 'myself', 'exit'] transitions = [ {'trigger' : 'toTopic', 'source' : '*', 'dest' : 'topic'}, {'trigger' : 'toStore', 'source' : 'topic', 'dest' : 'store'}, {'trigger' : 'toManu' , 'source' : 'topic', 'dest' : 'manu' }, {'trigger' : 'toFriend', 'source' : 'topic', 'dest' : 'friend' }, {'trigger' : 'toMyself', 'source' : 'topic', 'dest' : 'myself'}, {'trigger' : 'toExit', 'source' : '*', 'dest' : 'exit'} ] def __init__(self): self.isPlayer = False self.firstPerson = None self.secondPerson = None self.target = None self.machine = Machine(model=self, states=Conversation.states, transitions=Conversation.transitions, initial='exit') self.menuDict = { 'topic' : [self.toStore, self.toManu, self.toFriend, self.toMyself, self.toExit], 'store' : [self.different, self.toTopic, self.toExit, self.prices], 'manu' : [self.different, self.toTopic, self.toExit, self.prices], 'friend' : [self.different, self.toTopic, self.toExit, self.family, self.job], 'myself' : [self.introduction, self.toTopic, self.toExit, self.myfamily, self.myjob] } self.machine.on_enter_topic('topicHandler') self.machine.on_enter_store('storeHandler') self.machine.on_enter_manu('manuHandler') self.machine.on_enter_friend('friendHandler') self.machine.on_enter_myself('myselfHandler') self.machine.on_enter_exit('exitHandler') def beginConversation(self, firstPerson, secondPerson, isPlayer=False): self.isPlayer = isPlayer self.firstPerson = firstPerson self.secondPerson = secondPerson self.introduction() self.toTopic() def introduction(self): p2 = self.firstPerson.peopleManager(self.secondPerson) p1 = 
self.secondPerson.peopleManager(self.firstPerson) p2.name = self.secondPerson.name p1.name = self.firstPerson.name p2.updateOpinion(1) p1.updateOpinion(1) def different(self): if self.state == 'friend': testTarget = self.firstPerson.randomPerson(self.target) if testTarget is not None: self.target = testTarget.person else: self.target = None elif self.state == 'manu': testTarget = self.firstPerson.randomManu(self.target) if testTarget is not None: self.target = testTarget.store else: self.target = None elif self.state == 'store': testTarget = self.firstPerson.randomStore(self.target) if testTarget is not None: self.target = testTarget.store else: self.target = None def prices(self): if self.target is not None: firstProfile = self.firstPerson.unitManager(self.target, self.secondPerson) secondProfile = self.secondPerson.unitManager(self.target, self.firstPerson) firstPrices = firstProfile.getPricesWithDayNum() secondPrices = secondProfile.getPricesWithDayNum() firstDayNum = firstPrices[1] secondDayNum = secondPrices[1] if firstDayNum > secondDayNum: prices = firstPrices[0] secondProfile.updatePrices(prices, firstDayNum) #thoughts self.firstPerson.think("I told " + self.secondPerson.name + " about the prices at " + self.target.name + ".") self.secondPerson.think(self.firstPerson.name + " told me about the prices at " + self.target.name + ".") elif secondDayNum > firstDayNum: prices = secondPrices[0] firstProfile.updatePrices(prices, secondDayNum) #thoughts self.firstPerson.think(self.secondPerson.name + " told me about the prices at " + self.target.name + ".") self.secondPerson.think("I told " + self.firstPerson.name + " about the prices at " + self.target.name + ".") else: self.firstPerson.think(self.secondPerson.name + " and I talked about " + self.target.name + "'s prices.") self.secondPerson.think(self.firstPerson.name + " and I talked about " + self.target.name + "'s prices.") else: if self.state == 'store': self.firstPerson.think(self.secondPerson.name + " 
listened to me gripe about how I can't find anywhere to shop.") self.secondPerson.think(self.firstPerson.name + " told me that they can't find anywhere to shop.") elif self.state == 'manu': self.firstPerson.think("I mentioned to " + self.secondPerson.name + " that I don't know anything about the local industry.") self.secondPerson.think(self.firstPerson.name + " told me that they don't know much about the local industry.") else: self.firstPerson.think("There is a bug in conversation.prices. (not manu or store)") self.secondPerson.think("There is a bug in conversation.prices. (not manu or store)") def family(self): if self.target is not None: #info: family, people #profiles p1 = self.firstPerson.peopleManager(self.target) p2 = self.secondPerson.peopleManager(self.target) #variables f1 = p1.getFamily() f2 = p2.getFamily() ff = [] #update profiles for a, b in zip(f1, f2): if a[-1] >= b[-1]: ff.append(a) else: ff.append(b) p1.updateFamily(*ff) p2.updateFamily(*ff) #thoughts self.firstPerson.think(self.secondPerson.name + " and I gossipped about " + self.target.name + "'s family.") self.secondPerson.think(self.firstPerson.name + " and I gossipped about " + self.target.name + "'s family.") else: self.firstPerson.think("I don't really know anything about my friends' families.") self.secondPerson.think("I don't really know anything about my friends' families.") def job(self): if self.target is not None: #profiles firstProfile = self.firstPerson.peopleManager(self.target) secondProfile = self.secondPerson.peopleManager(self.target) #variables firstJob = firstProfile.getJob() secondJob = secondProfile.getJob() #update profiles if firstJob[1] > secondJob[1]: secondProfile.updateJob(*firstJob) self.firstPerson.think("I told " + self.secondPerson.name + " what " + self.target.name + " does for a living.") self.secondPerson.think(self.firstPerson.name + " told me what " + self.target.name + " does for a living.") elif secondJob[1] > firstJob[1]: 
firstProfile.updateJob(*secondJob) self.firstPerson.think(self.secondPerson.name + " told me what " + self.target.name + " does for a living.") self.secondPerson.think("I told " + self.firstPerson.name + " about " + self.target.name + " does for a living.") else: self.firstPerson.think(self.secondPerson.name + " and I talked about " + self.target.name + "'s job.") self.secondPerson.think(self.firstPerson.name + " and I talked about " + self.target.name + "'s job.") else: self.firstPerson.think("I don't know what any of my friends do for a living!") self.secondPerson.think("I don't know what any of my friends do for a living!") # def skills(self): # #info: skills # if self.target is not None: # #profiles # firstProfile = self.firstPerson.peopleManager(self.target) # secondProfile = self.secondPerson.peopleManager(self.target) # #variables # firstSkills = firstProfile.getSkills() # secondSkills = secondProfile.getSkills() # #update profiles # if firstSkills[1] > secondSkills[1]: # secondProfile.updateSkills(*firstSkills) # self.firstPerson.think("I told " + self.secondPerson.name + " about how good " + self.target.name + " is with their hands.") # self.secondPerson.think(self.firstPerson.name + " told me about how good " + self.target.name + " is with their hands.") # elif secondSkills[1] > firstSkills[1]: # firstProfile.updateSkills(*secondSkills) # self.firstPerson.think(self.secondPerson.name + " told me about how good " + self.target.name + " is with their hands.") # self.secondPerson.think("I told " + self.firstPerson.name + " about how good " + self.target.name + " is with their hands.") # else: # self.firstPerson.think(self.secondPerson.name + " and I talked about how good " + self.target.name + " is with their hands.") # self.secondPerson.think(self.firstPerson.name + " and I talked about how good " + self.target.name + " is with their hands.") # else: # self.firstPerson.think("I should spend more time doing things with my friends.") # 
self.secondPerson.think("I should spend more time doing things with my friends.") def myfamily(self): #info: family, people #profiles firstProfile = self.secondPerson.peopleManager(self.firstPerson) secondProfile = self.firstPerson.peopleManager(self.secondPerson) firstOwn = self.firstPerson.peopleManager(self.firstPerson) secondOwn = self.secondPerson.peopleManager(self.secondPerson) #update profiles firstProfile.updateFamily(firstOwn.getFather(), firstOwn.getMother(), firstOwn.getSpouse(), firstOwn.getSiblings(), firstOwn.getChildren()) secondProfile.updateFamily(secondOwn.getFather(), secondOwn.getMother(), secondOwn.getSpouse(), secondOwn.getSiblings(), secondOwn.getChildren()) #thoughts self.firstPerson.think(self.secondPerson.name + " caught me up on their family life.") self.secondPerson.think(self.firstPerson.name + " caught me up on their family life.") def myjob(self): #info: jobs, jobUnits, *salaries #profiles firstProfile = self.secondPerson.peopleManager(self.firstPerson) secondProfile = self.firstPerson.peopleManager(self.secondPerson) #variables firstJob = self.firstPerson.getJob() secondJob = self.secondPerson.getJob() dayNum = self.firstPerson.model.getDayNum() try: firstJobType = firstJob.getJobType() firstJobUnit = firstJob.getUnit() firstJobLoc = firstJobUnit.getName() firstSalary = firstJob.getSalary() except: firstJobType = "Jobhunter" firstJobUnit = None firstJobLoc = "home" firstSalary = 0 try: secondJobType = secondJob.getJobType() secondJobUnit = secondJob.getUnit() secondJobLoc = secondJobUnit.getName() secondSalary = secondJob.getSalary() except: secondJobType = "Jobhunter" secondJobUnit = None secondJobLoc = "home" secondSalary = 0 #update profiles if dayNum > firstProfile.getJob()[1]: firstProfile.updateJob(firstJob, dayNum) if dayNum > firstProfile.getSalary()[1]: firstProfile.updateSalary(firstSalary, dayNum) if dayNum > secondProfile.getJob()[1]: secondProfile.updateJob(secondJob, dayNum) if dayNum > secondProfile.getSalary()[1]: 
secondProfile.updateSalary(firstSalary, dayNum) if firstJobUnit is not None: self.secondPerson.unitManager(firstJobUnit, self.firstPerson) if secondJobUnit is not None: self.firstPerson.unitManager(secondJobUnit, self.secondPerson) #thoughts self.firstPerson.think(self.secondPerson.name + " told me about their job as a " + secondJobType + " at " + secondJobLoc + ".") self.secondPerson.think(self.firstPerson.name + " told me about their job as a " + firstJobType + " at " + firstJobLoc + ".") # def myskills(self): # #info skills # #profiles # firstProfile = self.secondPerson.peopleManager(self.firstPerson) # secondProfile = self.firstPerson.peopleManager(self.secondPerson) # #variables # firstSkills = self.firstPerson.getSkills() # secondSkills = self.secondPerson.getSkills() # dayNum = self.firstPerson.model.getDayNum() # #update profiles # if dayNum > firstProfile.getSkills()[1]: # firstProfile.updateSkills(firstSkills, dayNum) # if dayNum > secondProfile.getSkills()[1]: # secondProfile.updateSkills(secondSkills, dayNum) # #thoughts # self.firstPerson.think(self.secondPerson.name + " and I talked shop for a while.") # self.secondPerson.think(self.firstPerson.name + " and I talked shop for a while.") #dialogues are chosen here, but the actual method call is in the handler (eg prices) def talk(self, matrix, stateVector): if self.isPlayer: # stateVector = playerChoice pass else: #get dialogue probabilities given last dialogue probArray = np.dot(matrix, stateVector) prob = probArray.tolist() #choose dialogue choice = random.random() stateVector = [0 for i in range(len(prob))] for i in range(len(prob)): outcome = prob[i] if outcome >= choice: stateVector[i] = 1 return stateVector else: choice = choice - outcome def topicHandler(self): matrix = Conversation.topicMatrix stateVector = [0,0,0,0,1] # self.firstPerson.think("topicHandler") stateVector = self.talk(matrix, stateVector) for i in range(len(stateVector)): if stateVector[i] == 1: self.menuDict[self.state][i]() 
break def storeHandler(self): matrix = Conversation.storeMatrix stateVector = [0,1,0,0] # self.firstPerson.think("storeHandler") self.different() while self.state == 'store': stateVector = self.talk(matrix, stateVector) for i in range(len(stateVector)): if stateVector[i] == 1: self.menuDict[self.state][i]() break def manuHandler(self): matrix = Conversation.manuMatrix stateVector = [0,1,0,0] # self.firstPerson.think("manuHandler") self.different() while self.state == 'manu': stateVector = self.talk(matrix, stateVector) for i in range(len(stateVector)): if stateVector[i] == 1: self.menuDict[self.state][i]() break def friendHandler(self): matrix = Conversation.friendMatrix stateVector = [0,1,0,0,0] # self.firstPerson.think("friendHandler") self.different() while self.state == 'friend': stateVector = self.talk(matrix, stateVector) for i in range(len(stateVector)): if stateVector[i] == 1: self.menuDict[self.state][i]() break def myselfHandler(self): matrix = Conversation.myselfMatrix stateVector = [0,1,0,0,0] # self.firstPerson.think("myselfHandler") while self.state == 'myself': stateVector = self.talk(matrix, stateVector) for i in range(len(stateVector)): if stateVector[i] == 1: self.menuDict[self.state][i]() break def exitHandler(self): self.isPlayer = False Convo = Conversation()
en
0.742457
#Conversations are markov chains. Works as follows: a column vector for each CURRENT state j, a row vector for each TARGET state i. #Each entry i,j = the probability of moving to state i from state j. #target state D = end of conversation. We start in state D when initializing conversation. #row vectors sum to 1, internal lists are columns. #Conversation is a singleton. DO NOT CREATE NEW CONVERSATION OBJECTS. #a. stores, b.manufacturers, c.friends, d. myself, e.end conversation #a. different store, b. new topic, c. end convo, d. prices #a. different manufacturer, b. new topic, c. end convo, d. prices #a. different friend, b. new topic, c. end convo, d. family, e. job, /f. skills # friendMatrix = [ # [0.00,0.00,0.15,0.1,0.1,0.1], # [0.00,0.00,0.15,0.2,0.2,0.2], # [0.00,0.00,0.15,0.5,0.5,0.5], # [0.34,0.34,0.15,0.0,0.1,0.1], # [0.33,0.33,0.15,0.1,0.0,0.1], # [0.33,0.33,0.25,0.1,0.1,0.0] # ] #a. introduction, b. new topic, c. end convo, d. myfamily, e. myjob, /f. myskills # myselfMatrix = [ # [0.0,1,0.15,0.00,0.00,0.00], # [0.2,0,0.15,0.20,0.20,0.20], # [0.2,0,0.15,0.50,0.50,0.50], # [0.2,0,0.15,0.00,0.15,0.15], # [0.2,0,0.15,0.15,0.00,0.15], # [0.2,0,0.15,0.15,0.15,0.00] # ] #thoughts #thoughts #info: family, people #profiles #variables #update profiles #thoughts #profiles #variables #update profiles # def skills(self): # #info: skills # if self.target is not None: # #profiles # firstProfile = self.firstPerson.peopleManager(self.target) # secondProfile = self.secondPerson.peopleManager(self.target) # #variables # firstSkills = firstProfile.getSkills() # secondSkills = secondProfile.getSkills() # #update profiles # if firstSkills[1] > secondSkills[1]: # secondProfile.updateSkills(*firstSkills) # self.firstPerson.think("I told " + self.secondPerson.name + " about how good " + self.target.name + " is with their hands.") # self.secondPerson.think(self.firstPerson.name + " told me about how good " + self.target.name + " is with their hands.") # elif secondSkills[1] > 
firstSkills[1]: # firstProfile.updateSkills(*secondSkills) # self.firstPerson.think(self.secondPerson.name + " told me about how good " + self.target.name + " is with their hands.") # self.secondPerson.think("I told " + self.firstPerson.name + " about how good " + self.target.name + " is with their hands.") # else: # self.firstPerson.think(self.secondPerson.name + " and I talked about how good " + self.target.name + " is with their hands.") # self.secondPerson.think(self.firstPerson.name + " and I talked about how good " + self.target.name + " is with their hands.") # else: # self.firstPerson.think("I should spend more time doing things with my friends.") # self.secondPerson.think("I should spend more time doing things with my friends.") #info: family, people #profiles #update profiles #thoughts #info: jobs, jobUnits, *salaries #profiles #variables #update profiles #thoughts # def myskills(self): # #info skills # #profiles # firstProfile = self.secondPerson.peopleManager(self.firstPerson) # secondProfile = self.firstPerson.peopleManager(self.secondPerson) # #variables # firstSkills = self.firstPerson.getSkills() # secondSkills = self.secondPerson.getSkills() # dayNum = self.firstPerson.model.getDayNum() # #update profiles # if dayNum > firstProfile.getSkills()[1]: # firstProfile.updateSkills(firstSkills, dayNum) # if dayNum > secondProfile.getSkills()[1]: # secondProfile.updateSkills(secondSkills, dayNum) # #thoughts # self.firstPerson.think(self.secondPerson.name + " and I talked shop for a while.") # self.secondPerson.think(self.firstPerson.name + " and I talked shop for a while.") #dialogues are chosen here, but the actual method call is in the handler (eg prices) # stateVector = playerChoice #get dialogue probabilities given last dialogue #choose dialogue # self.firstPerson.think("topicHandler") # self.firstPerson.think("storeHandler") # self.firstPerson.think("manuHandler") # self.firstPerson.think("friendHandler") # self.firstPerson.think("myselfHandler")
3.189616
3
src/createData.py
saijananiganesan/SimPathFinder
0
9808
from __init__ import ExtractUnlabeledData, SampleUnlabeledData, ExtractLabeledData E = ExtractLabeledData(data_dir='../labeldata/') E.get_pathways() E.get_pathway_names() E.get_classes_dict() E.create_df_all_labels()
from __init__ import ExtractUnlabeledData, SampleUnlabeledData, ExtractLabeledData E = ExtractLabeledData(data_dir='../labeldata/') E.get_pathways() E.get_pathway_names() E.get_classes_dict() E.create_df_all_labels()
none
1
1.652122
2
blog/views.py
farman99ahmed/diyblog
0
9809
from django.shortcuts import render, redirect from .forms import AuthorForm, BlogForm, NewUserForm from .models import Author, Blog from django.contrib.auth import login, authenticate, logout from django.contrib import messages from django.contrib.auth.forms import AuthenticationForm from django.contrib.auth.decorators import login_required # Create your views here. def get_authors(request): context = {'authors': Author.objects.all()} return render(request, "blog/get_authors.html", context) @login_required def get_author(request, id): author = Author.objects.get(pk = id) blogs = Blog.objects.filter(author = id) context = {'author': author, 'blogs': blogs} return render(request, "blog/get_author.html", context) @login_required def post_put_author(request, id = 0): if request.method == "GET": if id == 0: form = AuthorForm() else: author = Author.objects.get(pk = id) form = AuthorForm(instance = author) return render(request, "blog/post_put_authors.html", {"form": form}) else: if id == 0: form = AuthorForm(request.POST) else: author = Author.objects.get(pk = id) form = AuthorForm(request.POST, instance = author) if form.is_valid(): form.save() return redirect('get_authors') @login_required def delete_author(request, id): author = Author.objects.get(pk = id) author.delete() return redirect('get_authors') def get_blogs(request): context = {'blogs': Blog.objects.all()} return render(request, "blog/get_blogs.html", context) @login_required def get_blog(request, id): blog = {'blog': Blog.objects.get(pk = id)} return render(request, "blog/get_blog.html", blog) @login_required def post_put_blog(request, id = 0): if request.method == "GET": if id == 0: form = BlogForm() else: blog = Blog.objects.get(pk = id) form = BlogForm(instance = blog) return render(request, "blog/post_put_blogs.html", {"form": form}) else: if id == 0: form = BlogForm(request.POST) else: blog = Blog.objects.get(pk = id) form = BlogForm(request.POST, instance = blog) if form.is_valid(): form.save() return 
redirect('get_blogs') @login_required def delete_blog(request, id): blog = Blog.objects.get(pk = id) blog.delete() return redirect('get_blogs') def register_request(request): if request.method == "POST": form = NewUserForm(request.POST) if form.is_valid(): user = form.save() login(request, user) messages.success(request, "Registration successful." ) return redirect("get_blogs") messages.error(request, "Unsuccessful registration. Invalid information.") form = NewUserForm() return render (request=request, template_name="blog/register.html", context={"register_form":form}) def login_request(request): if request.method == "POST": form = AuthenticationForm(request, data=request.POST) if form.is_valid(): username = form.cleaned_data.get('username') password = form.cleaned_data.get('password') user = authenticate(username=username, password=password) if user is not None: login(request, user) messages.info(request, f"You are now logged in as {username}.") return redirect("get_blogs") else: messages.error(request,"Invalid username or password.") else: messages.error(request,"Invalid username or password.") form = AuthenticationForm() return render(request=request, template_name="blog/login.html", context={"login_form":form}) def logout_request(request): logout(request) messages.info(request, "You have successfully logged out.") return redirect("get_blogs")
from django.shortcuts import render, redirect from .forms import AuthorForm, BlogForm, NewUserForm from .models import Author, Blog from django.contrib.auth import login, authenticate, logout from django.contrib import messages from django.contrib.auth.forms import AuthenticationForm from django.contrib.auth.decorators import login_required # Create your views here. def get_authors(request): context = {'authors': Author.objects.all()} return render(request, "blog/get_authors.html", context) @login_required def get_author(request, id): author = Author.objects.get(pk = id) blogs = Blog.objects.filter(author = id) context = {'author': author, 'blogs': blogs} return render(request, "blog/get_author.html", context) @login_required def post_put_author(request, id = 0): if request.method == "GET": if id == 0: form = AuthorForm() else: author = Author.objects.get(pk = id) form = AuthorForm(instance = author) return render(request, "blog/post_put_authors.html", {"form": form}) else: if id == 0: form = AuthorForm(request.POST) else: author = Author.objects.get(pk = id) form = AuthorForm(request.POST, instance = author) if form.is_valid(): form.save() return redirect('get_authors') @login_required def delete_author(request, id): author = Author.objects.get(pk = id) author.delete() return redirect('get_authors') def get_blogs(request): context = {'blogs': Blog.objects.all()} return render(request, "blog/get_blogs.html", context) @login_required def get_blog(request, id): blog = {'blog': Blog.objects.get(pk = id)} return render(request, "blog/get_blog.html", blog) @login_required def post_put_blog(request, id = 0): if request.method == "GET": if id == 0: form = BlogForm() else: blog = Blog.objects.get(pk = id) form = BlogForm(instance = blog) return render(request, "blog/post_put_blogs.html", {"form": form}) else: if id == 0: form = BlogForm(request.POST) else: blog = Blog.objects.get(pk = id) form = BlogForm(request.POST, instance = blog) if form.is_valid(): form.save() return 
redirect('get_blogs') @login_required def delete_blog(request, id): blog = Blog.objects.get(pk = id) blog.delete() return redirect('get_blogs') def register_request(request): if request.method == "POST": form = NewUserForm(request.POST) if form.is_valid(): user = form.save() login(request, user) messages.success(request, "Registration successful." ) return redirect("get_blogs") messages.error(request, "Unsuccessful registration. Invalid information.") form = NewUserForm() return render (request=request, template_name="blog/register.html", context={"register_form":form}) def login_request(request): if request.method == "POST": form = AuthenticationForm(request, data=request.POST) if form.is_valid(): username = form.cleaned_data.get('username') password = form.cleaned_data.get('password') user = authenticate(username=username, password=password) if user is not None: login(request, user) messages.info(request, f"You are now logged in as {username}.") return redirect("get_blogs") else: messages.error(request,"Invalid username or password.") else: messages.error(request,"Invalid username or password.") form = AuthenticationForm() return render(request=request, template_name="blog/login.html", context={"login_form":form}) def logout_request(request): logout(request) messages.info(request, "You have successfully logged out.") return redirect("get_blogs")
en
0.968116
# Create your views here.
2.268393
2
tests/conftest.py
SolomidHero/speech-regeneration-enhancer
8
9810
<filename>tests/conftest.py # here we make fixtures of toy data # real parameters are stored and accessed from config import pytest import librosa import os import numpy as np from hydra.experimental import compose, initialize @pytest.fixture(scope="session") def cfg(): with initialize(config_path="../", job_name="test_app"): config = compose(config_name="config") config.dataset = compose(config_name="tests/test_dataset_config") config.train = compose(config_name="tests/test_train_config") return config @pytest.fixture(scope="session") def sample_rate(cfg): return cfg.data.sample_rate @pytest.fixture(scope="session") def example_wav(sample_rate): wav, sr = librosa.load( os.path.dirname(__file__) + "/data/example.mp3", sr=sample_rate, dtype=np.float32, ) return { 'wav': wav, 'sr': sr } @pytest.fixture(scope="session") def n_fft(cfg): return cfg.data.n_fft @pytest.fixture(scope="session") def hop_length(cfg): return cfg.data.hop_length @pytest.fixture(scope="session") def win_length(cfg): return cfg.data.win_length @pytest.fixture(scope="session") def f_min(cfg): return cfg.data.f_min @pytest.fixture(scope="session") def f_max(cfg): return cfg.data.f_max @pytest.fixture(scope="session") def hop_ms(example_wav, hop_length): return 1e3 * hop_length / example_wav['sr'] @pytest.fixture(scope="session") def n_frames(example_wav, hop_length): return (example_wav['wav'].shape[-1] - 1) // hop_length + 1 # It is not clear if we should cleanup the test directories # or leave them for debugging # https://github.com/pytest-dev/pytest/issues/3051 @pytest.fixture(autouse=True, scope='session') def clear_files_teardown(): yield None os.system("rm -r tests/test_dataset tests/test_experiment tests/test_logs")
<filename>tests/conftest.py # here we make fixtures of toy data # real parameters are stored and accessed from config import pytest import librosa import os import numpy as np from hydra.experimental import compose, initialize @pytest.fixture(scope="session") def cfg(): with initialize(config_path="../", job_name="test_app"): config = compose(config_name="config") config.dataset = compose(config_name="tests/test_dataset_config") config.train = compose(config_name="tests/test_train_config") return config @pytest.fixture(scope="session") def sample_rate(cfg): return cfg.data.sample_rate @pytest.fixture(scope="session") def example_wav(sample_rate): wav, sr = librosa.load( os.path.dirname(__file__) + "/data/example.mp3", sr=sample_rate, dtype=np.float32, ) return { 'wav': wav, 'sr': sr } @pytest.fixture(scope="session") def n_fft(cfg): return cfg.data.n_fft @pytest.fixture(scope="session") def hop_length(cfg): return cfg.data.hop_length @pytest.fixture(scope="session") def win_length(cfg): return cfg.data.win_length @pytest.fixture(scope="session") def f_min(cfg): return cfg.data.f_min @pytest.fixture(scope="session") def f_max(cfg): return cfg.data.f_max @pytest.fixture(scope="session") def hop_ms(example_wav, hop_length): return 1e3 * hop_length / example_wav['sr'] @pytest.fixture(scope="session") def n_frames(example_wav, hop_length): return (example_wav['wav'].shape[-1] - 1) // hop_length + 1 # It is not clear if we should cleanup the test directories # or leave them for debugging # https://github.com/pytest-dev/pytest/issues/3051 @pytest.fixture(autouse=True, scope='session') def clear_files_teardown(): yield None os.system("rm -r tests/test_dataset tests/test_experiment tests/test_logs")
en
0.841165
# here we make fixtures of toy data # real parameters are stored and accessed from config # It is not clear if we should cleanup the test directories # or leave them for debugging # https://github.com/pytest-dev/pytest/issues/3051
1.974091
2
dataclassses_howto.py
CvanderStoep/VideosSampleCode
285
9811
<gh_stars>100-1000 import dataclasses import inspect from dataclasses import dataclass, field from pprint import pprint import attr class ManualComment: def __init__(self, id: int, text: str): self.id: int = id self.text: str = text def __repr__(self): return "{}(id={}, text={})".format(self.__class__.__name__, self.id, self.text) def __eq__(self, other): if other.__class__ is self.__class__: return (self.id, self.text) == (other.id, other.text) else: return NotImplemented def __ne__(self, other): result = self.__eq__(other) if result is NotImplemented: return NotImplemented else: return not result def __hash__(self): return hash((self.__class__, self.id, self.text)) def __lt__(self, other): if other.__class__ is self.__class__: return (self.id, self.text) < (other.id, other.text) else: return NotImplemented def __le__(self, other): if other.__class__ is self.__class__: return (self.id, self.text) <= (other.id, other.text) else: return NotImplemented def __gt__(self, other): if other.__class__ is self.__class__: return (self.id, self.text) > (other.id, other.text) else: return NotImplemented def __ge__(self, other): if other.__class__ is self.__class__: return (self.id, self.text) >= (other.id, other.text) else: return NotImplemented @dataclass(frozen=True, order=True) class Comment: id: int text: str = "" replies: list[int] = field(default_factory=list, repr=False, compare=False) @attr.s(frozen=True, order=True, slots=True) class AttrComment: id: int = 0 text: str = "" def main(): comment = Comment(1, "I just subscribed!") # comment.id = 3 # can't immutable print(comment) print(dataclasses.astuple(comment)) print(dataclasses.asdict(comment)) copy = dataclasses.replace(comment, id=3) print(copy) pprint(inspect.getmembers(Comment, inspect.isfunction)) if __name__ == '__main__': main()
import dataclasses import inspect from dataclasses import dataclass, field from pprint import pprint import attr class ManualComment: def __init__(self, id: int, text: str): self.id: int = id self.text: str = text def __repr__(self): return "{}(id={}, text={})".format(self.__class__.__name__, self.id, self.text) def __eq__(self, other): if other.__class__ is self.__class__: return (self.id, self.text) == (other.id, other.text) else: return NotImplemented def __ne__(self, other): result = self.__eq__(other) if result is NotImplemented: return NotImplemented else: return not result def __hash__(self): return hash((self.__class__, self.id, self.text)) def __lt__(self, other): if other.__class__ is self.__class__: return (self.id, self.text) < (other.id, other.text) else: return NotImplemented def __le__(self, other): if other.__class__ is self.__class__: return (self.id, self.text) <= (other.id, other.text) else: return NotImplemented def __gt__(self, other): if other.__class__ is self.__class__: return (self.id, self.text) > (other.id, other.text) else: return NotImplemented def __ge__(self, other): if other.__class__ is self.__class__: return (self.id, self.text) >= (other.id, other.text) else: return NotImplemented @dataclass(frozen=True, order=True) class Comment: id: int text: str = "" replies: list[int] = field(default_factory=list, repr=False, compare=False) @attr.s(frozen=True, order=True, slots=True) class AttrComment: id: int = 0 text: str = "" def main(): comment = Comment(1, "I just subscribed!") # comment.id = 3 # can't immutable print(comment) print(dataclasses.astuple(comment)) print(dataclasses.asdict(comment)) copy = dataclasses.replace(comment, id=3) print(copy) pprint(inspect.getmembers(Comment, inspect.isfunction)) if __name__ == '__main__': main()
en
0.947151
# comment.id = 3 # can't immutable
3.221804
3
downloadMusic/main.py
yaosir0317/my_first
0
9812
<reponame>yaosir0317/my_first from enum import Enum import requests class MusicAPP(Enum): qq = "qq" wy = "netease" PRE_URL = "http://www.musictool.top/" headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.102 Safari/537.36"} def get_music_list(name, app, page=1): data = {"input": name, "filter": "name", "type": app, "page": page} resp = requests.post(url=PRE_URL, headers=headers, data=data) print(resp.text) print(resp.json()) if __name__ == '__main__': get_music_list("画", MusicAPP.qq)
from enum import Enum import requests class MusicAPP(Enum): qq = "qq" wy = "netease" PRE_URL = "http://www.musictool.top/" headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.102 Safari/537.36"} def get_music_list(name, app, page=1): data = {"input": name, "filter": "name", "type": app, "page": page} resp = requests.post(url=PRE_URL, headers=headers, data=data) print(resp.text) print(resp.json()) if __name__ == '__main__': get_music_list("画", MusicAPP.qq)
none
1
3.057793
3
app/api/serializers.py
michelmarcondes/django-study-with-docker
0
9813
<filename>app/api/serializers.py from rest_framework import serializers from projects.models import Project, Tag, Review from users.models import Profile class ReviewSerializer(serializers.ModelSerializer): class Meta: model = Review fields = '__all__' class ProfileSerializer(serializers.ModelSerializer): class Meta: model = Profile fields = '__all__' class TagSerializer(serializers.ModelSerializer): class Meta: model = Tag fields = '__all__' class ProjectSerializer(serializers.ModelSerializer): owner = ProfileSerializer(many=False) tags = TagSerializer(many=True) reviews = serializers.SerializerMethodField() class Meta: model = Project fields = '__all__' def get_reviews(self, obj): reviews = obj.review_set.all() serializer = ReviewSerializer(reviews, many=True) return serializer.data
<filename>app/api/serializers.py from rest_framework import serializers from projects.models import Project, Tag, Review from users.models import Profile class ReviewSerializer(serializers.ModelSerializer): class Meta: model = Review fields = '__all__' class ProfileSerializer(serializers.ModelSerializer): class Meta: model = Profile fields = '__all__' class TagSerializer(serializers.ModelSerializer): class Meta: model = Tag fields = '__all__' class ProjectSerializer(serializers.ModelSerializer): owner = ProfileSerializer(many=False) tags = TagSerializer(many=True) reviews = serializers.SerializerMethodField() class Meta: model = Project fields = '__all__' def get_reviews(self, obj): reviews = obj.review_set.all() serializer = ReviewSerializer(reviews, many=True) return serializer.data
none
1
2.323122
2
corehq/apps/domain/views.py
johan--/commcare-hq
0
9814
import copy import datetime from decimal import Decimal import logging import uuid import json import cStringIO from couchdbkit import ResourceNotFound import dateutil from django.core.paginator import Paginator from django.views.generic import View from django.db.models import Sum from django.conf import settings from django.template.loader import render_to_string from django.utils.decorators import method_decorator from django.utils.safestring import mark_safe from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect, HttpResponse, Http404 from django.shortcuts import redirect, render from django.contrib import messages from django.views.decorators.http import require_POST from PIL import Image from django.utils.translation import ugettext as _, ugettext_noop, ugettext_lazy from corehq.const import USER_DATE_FORMAT from custom.dhis2.forms import Dhis2SettingsForm from custom.dhis2.models import Dhis2Settings from casexml.apps.case.mock import CaseBlock from casexml.apps.case.xml import V2 from corehq.apps.accounting.async_handlers import Select2BillingInfoHandler from corehq.apps.accounting.invoicing import DomainWireInvoiceFactory from corehq.apps.accounting.decorators import ( requires_privilege_with_fallback, ) from corehq.apps.hqwebapp.tasks import send_mail_async from corehq.apps.accounting.exceptions import ( NewSubscriptionError, PaymentRequestError, ) from corehq.apps.accounting.payment_handlers import ( BulkStripePaymentHandler, CreditStripePaymentHandler, InvoiceStripePaymentHandler, ) from corehq.apps.accounting.subscription_changes import DomainDowngradeStatusHandler from corehq.apps.accounting.forms import EnterprisePlanContactForm from corehq.apps.accounting.utils import ( get_change_status, get_privileges, fmt_dollar_amount, quantize_accounting_decimal, get_customer_cards, ) from corehq.apps.hqwebapp.async_handler import AsyncHandlerMixin from corehq.apps.smsbillables.async_handlers import SMSRatesAsyncHandler, 
SMSRatesSelect2AsyncHandler from corehq.apps.smsbillables.forms import SMSRateCalculatorForm from corehq.apps.users.models import DomainInvitation from corehq.apps.fixtures.models import FixtureDataType from corehq.toggles import NAMESPACE_DOMAIN, all_toggles, CAN_EDIT_EULA, TRANSFER_DOMAIN from corehq.util.context_processors import get_domain_type from dimagi.utils.couch.resource_conflict import retry_resource from corehq import privileges, feature_previews from django_prbac.utils import has_privilege from corehq.apps.accounting.models import ( Subscription, CreditLine, SoftwareProductType, SubscriptionType, DefaultProductPlan, SoftwarePlanEdition, BillingAccount, BillingAccountType, Invoice, BillingRecord, InvoicePdf, PaymentMethodType, PaymentMethod, EntryPoint, WireInvoice, SoftwarePlanVisibility, FeatureType, StripePaymentMethod, ) from corehq.apps.accounting.usage import FeatureUsageCalculator from corehq.apps.accounting.user_text import ( get_feature_name, PricingTable, DESC_BY_EDITION, get_feature_recurring_interval, ) from corehq.apps.hqwebapp.models import ProjectSettingsTab from corehq.apps import receiverwrapper from corehq.apps.domain.calculations import CALCS, CALC_FNS, CALC_ORDER, dom_calc from corehq.apps.domain.decorators import ( domain_admin_required, login_required, require_superuser, login_and_domain_required ) from corehq.apps.domain.forms import ( DomainGlobalSettingsForm, DomainMetadataForm, SnapshotSettingsForm, SnapshotApplicationForm, DomainInternalForm, PrivacySecurityForm, ConfirmNewSubscriptionForm, ProBonoForm, EditBillingAccountInfoForm, ConfirmSubscriptionRenewalForm, SnapshotFixtureForm, TransferDomainForm, SelectSubscriptionTypeForm, INTERNAL_SUBSCRIPTION_MANAGEMENT_FORMS) from corehq.apps.domain.models import Domain, LICENSES, TransferDomainRequest from corehq.apps.domain.utils import normalize_domain_name from corehq.apps.hqwebapp.views import BaseSectionPageView, BasePageView, CRUDPaginatedViewMixin from corehq.apps.orgs.models 
import Organization, OrgRequest, Team from corehq.apps.domain.forms import ProjectSettingsForm from dimagi.utils.decorators.memoized import memoized from dimagi.utils.web import get_ip, json_response, get_site_domain from corehq.apps.users.decorators import require_can_edit_web_users from corehq.apps.receiverwrapper.forms import GenericRepeaterForm, FormRepeaterForm from corehq.apps.receiverwrapper.models import FormRepeater, CaseRepeater, ShortFormRepeater, AppStructureRepeater, \ RepeatRecord from dimagi.utils.post import simple_post from toggle.models import Toggle from corehq.apps.hqwebapp.tasks import send_html_email_async accounting_logger = logging.getLogger('accounting') PAYMENT_ERROR_MESSAGES = { 400: ugettext_lazy('Your request was not formatted properly.'), 403: ugettext_lazy('Forbidden.'), 404: ugettext_lazy('Page not found.'), 500: ugettext_lazy("There was an error processing your request." " We're working quickly to fix the issue. Please try again shortly."), } # Domain not required here - we could be selecting it for the first time. 
See notes domain.decorators # about why we need this custom login_required decorator @login_required def select(request, domain_select_template='domain/select.html', do_not_redirect=False): domains_for_user = Domain.active_for_user(request.user) if not domains_for_user: return redirect('registration_domain', domain_type=get_domain_type(None, request)) email = request.couch_user.get_email() open_invitations = [e for e in DomainInvitation.by_email(email) if not e.is_expired] additional_context = { 'domains_for_user': domains_for_user, 'open_invitations': open_invitations, } last_visited_domain = request.session.get('last_visited_domain') if open_invitations \ or do_not_redirect \ or not last_visited_domain: return render(request, domain_select_template, additional_context) else: domain = Domain.get_by_name(last_visited_domain) if domain and domain.is_active: # mirrors logic in login_and_domain_required if ( request.couch_user.is_member_of(domain) or domain.is_public or (request.user.is_superuser and not domain.restrict_superusers) or domain.is_snapshot ): try: from corehq.apps.dashboard.views import dashboard_default return dashboard_default(request, last_visited_domain) except Http404: pass del request.session['last_visited_domain'] return render(request, domain_select_template, additional_context) @require_superuser def incomplete_email(request, incomplete_email_template='domain/incomplete_email.html'): from corehq.apps.domain.tasks import ( incomplete_self_started_domains, incomplete_domains_to_email ) context = { 'self_started': incomplete_self_started_domains, 'dimagi_owned': incomplete_domains_to_email, } return render(request, incomplete_email_template, context) class DomainViewMixin(object): """ Paving the way for a world of entirely class-based views. Let's do this, guys. :-) Set strict_domain_fetching to True in subclasses to bypass the cache. 
""" strict_domain_fetching = False @property @memoized def domain(self): domain = self.args[0] if len(self.args) > 0 else self.kwargs.get('domain', "") return normalize_domain_name(domain) @property @memoized def domain_object(self): domain = Domain.get_by_name(self.domain, strict=self.strict_domain_fetching) if not domain: raise Http404() return domain class LoginAndDomainMixin(object): @method_decorator(login_and_domain_required) def dispatch(self, *args, **kwargs): return super(LoginAndDomainMixin, self).dispatch(*args, **kwargs) class SubscriptionUpgradeRequiredView(LoginAndDomainMixin, BasePageView, DomainViewMixin): page_title = ugettext_lazy("Upgrade Required") template_name = "domain/insufficient_privilege_notification.html" @property def page_url(self): return self.request.get_full_path @property def page_name(self): return _("Sorry, you do not have access to %(feature_name)s") % { 'feature_name': self.feature_name, } @property def is_domain_admin(self): if not hasattr(self.request, 'couch_user'): return False return self.request.couch_user.is_domain_admin(self.domain) @property def page_context(self): return { 'domain': self.domain, 'feature_name': self.feature_name, 'plan_name': self.required_plan_name, 'change_subscription_url': reverse(SelectPlanView.urlname, args=[self.domain]), 'is_domain_admin': self.is_domain_admin, } @property def missing_privilege(self): return self.args[1] @property def feature_name(self): return privileges.Titles.get_name_from_privilege(self.missing_privilege) @property def required_plan_name(self): return DefaultProductPlan.get_lowest_edition_by_domain( self.domain_object, [self.missing_privilege] ) def get(self, request, *args, **kwargs): self.request = request self.args = args return super(SubscriptionUpgradeRequiredView, self).get( request, *args, **kwargs ) class BaseDomainView(LoginAndDomainMixin, BaseSectionPageView, DomainViewMixin): @property def main_context(self): main_context = super(BaseDomainView, 
self).main_context main_context.update({ 'domain': self.domain, }) return main_context @property @memoized def page_url(self): if self.urlname: return reverse(self.urlname, args=[self.domain]) class BaseProjectSettingsView(BaseDomainView): section_name = ugettext_lazy("Project Settings") template_name = "settings/base_template.html" @property def main_context(self): main_context = super(BaseProjectSettingsView, self).main_context main_context.update({ 'active_tab': ProjectSettingsTab( self.request, self.urlname, domain=self.domain, couch_user=self.request.couch_user, project=self.request.project ), 'is_project_settings': True, }) return main_context @property @memoized def section_url(self): return reverse(EditMyProjectSettingsView.urlname, args=[self.domain]) class DefaultProjectSettingsView(BaseDomainView): urlname = 'domain_settings_default' def get(self, request, *args, **kwargs): if request.couch_user.is_domain_admin(self.domain): return HttpResponseRedirect(reverse(EditBasicProjectInfoView.urlname, args=[self.domain])) return HttpResponseRedirect(reverse(EditMyProjectSettingsView.urlname, args=[self.domain])) class BaseAdminProjectSettingsView(BaseProjectSettingsView): """ The base class for all project settings views that require administrative access. """ @method_decorator(domain_admin_required) def dispatch(self, request, *args, **kwargs): return super(BaseProjectSettingsView, self).dispatch(request, *args, **kwargs) class BaseEditProjectInfoView(BaseAdminProjectSettingsView): """ The base class for all the edit project information views. """ strict_domain_fetching = True @property def autocomplete_fields(self): return [] @property def main_context(self): context = super(BaseEditProjectInfoView, self).main_context context.update({ 'autocomplete_fields': self.autocomplete_fields, 'commtrack_enabled': self.domain_object.commtrack_enabled, # ideally the template gets access to the domain doc through # some other means. 
otherwise it has to be supplied to every view reachable in that sidebar (every # view whose template extends users_base.html); mike says he's refactoring all of this imminently, so # i will not worry about it until he is done 'call_center_enabled': self.domain_object.call_center_config.enabled, 'cloudcare_releases': self.domain_object.cloudcare_releases, }) return context class EditBasicProjectInfoView(BaseEditProjectInfoView): template_name = 'domain/admin/info_basic.html' urlname = 'domain_basic_info' page_title = ugettext_lazy("Basic") @property def can_user_see_meta(self): return self.request.couch_user.is_previewer() @property def can_use_custom_logo(self): return has_privilege(self.request, privileges.CUSTOM_BRANDING) @property @memoized def basic_info_form(self): initial = { 'hr_name': self.domain_object.hr_name or self.domain_object.name, 'default_timezone': self.domain_object.default_timezone, 'case_sharing': json.dumps(self.domain_object.case_sharing), 'call_center_enabled': self.domain_object.call_center_config.enabled, 'call_center_type': self.initial_call_center_type, 'call_center_case_owner': self.initial_call_center_case_owner, 'call_center_case_type': self.domain_object.call_center_config.case_type, 'commtrack_enabled': self.domain_object.commtrack_enabled, } if self.request.method == 'POST': if self.can_user_see_meta: return DomainMetadataForm( self.request.POST, self.request.FILES, user=self.request.couch_user, domain=self.domain_object.name, can_use_custom_logo=self.can_use_custom_logo, ) return DomainGlobalSettingsForm( self.request.POST, self.request.FILES, domain=self.domain_object.name, can_use_custom_logo=self.can_use_custom_logo ) if self.can_user_see_meta: initial.update({ 'is_test': self.domain_object.is_test, 'cloudcare_releases': self.domain_object.cloudcare_releases, }) return DomainMetadataForm( can_use_custom_logo=self.can_use_custom_logo, user=self.request.couch_user, domain=self.domain_object.name, initial=initial ) return 
DomainGlobalSettingsForm( initial=initial, domain=self.domain_object.name, can_use_custom_logo=self.can_use_custom_logo ) @property @memoized def initial_call_center_case_owner(self): config = self.domain_object.call_center_config if config.use_user_location_as_owner: return DomainGlobalSettingsForm.USE_LOCATIONS_CHOICE return self.domain_object.call_center_config.case_owner_id @property @memoized def initial_call_center_type(self): if self.domain_object.call_center_config.use_fixtures: return DomainGlobalSettingsForm.CASES_AND_FIXTURES_CHOICE return DomainGlobalSettingsForm.CASES_ONLY_CHOICE @property def page_context(self): return { 'basic_info_form': self.basic_info_form, } def post(self, request, *args, **kwargs): if self.basic_info_form.is_valid(): if self.basic_info_form.save(request, self.domain_object): messages.success(request, _("Project settings saved!")) else: messages.error(request, _("There seems to have been an error saving your settings. Please try again!")) return self.get(request, *args, **kwargs) class EditMyProjectSettingsView(BaseProjectSettingsView): template_name = 'domain/admin/my_project_settings.html' urlname = 'my_project_settings' page_title = ugettext_lazy("My Timezone") @property @memoized def my_project_settings_form(self): initial = { 'global_timezone': self.domain_object.default_timezone } if self.domain_membership: initial.update({ 'override_global_tz': self.domain_membership.override_global_tz, 'user_timezone': (self.domain_membership.timezone if self.domain_membership.override_global_tz else self.domain_object.default_timezone), }) else: initial.update({ 'override_global_tz': False, 'user_timezone': initial["global_timezone"], }) if self.request.method == 'POST': return ProjectSettingsForm(self.request.POST, initial=initial) return ProjectSettingsForm(initial=initial) @property @memoized def domain_membership(self): return self.request.couch_user.get_domain_membership(self.domain) @property def page_context(self): return { 
'my_project_settings_form': self.my_project_settings_form, 'override_global_tz': self.domain_membership.override_global_tz if self.domain_membership else False, 'no_domain_membership': not self.domain_membership, } def post(self, request, *args, **kwargs): if self.my_project_settings_form.is_valid(): self.my_project_settings_form.save(self.request.couch_user, self.domain) messages.success(request, _("Your project settings have been saved!")) return self.get(request, *args, **kwargs) class EditDhis2SettingsView(BaseProjectSettingsView): template_name = 'domain/admin/dhis2_settings.html' urlname = 'dhis2_settings' page_title = ugettext_lazy("DHIS2 API settings") @property @memoized def dhis2_settings_form(self): settings_ = Dhis2Settings.for_domain(self.domain_object.name) initial = settings_.dhis2 if settings_ else {'enabled': False} if self.request.method == 'POST': return Dhis2SettingsForm(self.request.POST, initial=initial) return Dhis2SettingsForm(initial=initial) @property def page_context(self): return { 'dhis2_settings_form': self.dhis2_settings_form, } def post(self, request, *args, **kwargs): if self.dhis2_settings_form.is_valid(): if self.dhis2_settings_form.save(self.domain_object): messages.success(request, _('DHIS2 API settings successfully updated')) else: messages.error(request, _('There seems to have been an error. 
Please try again.')) return self.get(request, *args, **kwargs) @require_POST @require_can_edit_web_users def drop_repeater(request, domain, repeater_id): rep = FormRepeater.get(repeater_id) rep.retire() messages.success(request, "Form forwarding stopped!") return HttpResponseRedirect(reverse(DomainForwardingOptionsView.urlname, args=[domain])) @require_POST @require_can_edit_web_users def test_repeater(request, domain): url = request.POST["url"] repeater_type = request.POST['repeater_type'] format = request.POST['format'] form = GenericRepeaterForm( {"url": url, "format": format}, domain=domain, repeater_class=receiverwrapper.models.repeater_types[repeater_type] ) if form.is_valid(): url = form.cleaned_data["url"] # now we fake a post def _stub(repeater_type): if 'case' in repeater_type.lower(): return CaseBlock( case_id='test-case-%s' % uuid.uuid4().hex, create=True, case_type='test', case_name='test case', ).as_string() else: return "<?xml version='1.0' ?><data id='test'><TestString>Test post from CommCareHQ on %s</TestString></data>" % \ (datetime.datetime.utcnow()) fake_post = _stub(repeater_type) try: resp = simple_post(fake_post, url) if 200 <= resp.status < 300: return HttpResponse(json.dumps({"success": True, "response": resp.read(), "status": resp.status})) else: return HttpResponse(json.dumps({"success": False, "response": resp.read(), "status": resp.status})) except Exception, e: errors = str(e) return HttpResponse(json.dumps({"success": False, "response": errors})) else: return HttpResponse(json.dumps({"success": False, "response": "Please enter a valid url."})) def autocomplete_fields(request, field): prefix = request.GET.get('prefix', '') results = Domain.field_by_prefix(field, prefix) return HttpResponse(json.dumps(results)) def logo(request, domain): logo = Domain.get_by_name(domain).get_custom_logo() if logo is None: raise Http404() return HttpResponse(logo[0], content_type=logo[1]) class DomainAccountingSettings(BaseAdminProjectSettingsView): 
@method_decorator(login_and_domain_required) def dispatch(self, request, *args, **kwargs): return super(DomainAccountingSettings, self).dispatch(request, *args, **kwargs) @property @memoized def product(self): return SoftwareProductType.get_type_by_domain(self.domain_object) @property @memoized def account(self): return BillingAccount.get_account_by_domain(self.domain) @property def current_subscription(self): return Subscription.get_subscribed_plan_by_domain(self.domain_object)[1] class DomainSubscriptionView(DomainAccountingSettings): urlname = 'domain_subscription_view' template_name = 'domain/current_subscription.html' page_title = ugettext_lazy("Current Subscription") @property def can_purchase_credits(self): return self.request.couch_user.is_domain_admin(self.domain) @property def plan(self): plan_version, subscription = Subscription.get_subscribed_plan_by_domain(self.domain_object) date_end = None next_subscription = { 'exists': False, 'can_renew': False, 'name': None, 'price': None, } cards = None general_credits = None if subscription: cards = get_customer_cards(self.account, self.request.user.username, self.domain) date_end = (subscription.date_end.strftime(USER_DATE_FORMAT) if subscription.date_end is not None else "--") if subscription.date_end is not None: if subscription.is_renewed: next_product = self.get_product_summary(subscription.next_subscription.plan_version, self.account, subscription) next_subscription.update({ 'exists': True, 'date_start': subscription.next_subscription.date_start.strftime(USER_DATE_FORMAT), 'name': subscription.next_subscription.plan_version.plan.name, 'price': next_product['monthly_fee'], }) else: days_left = (subscription.date_end - datetime.date.today()).days next_subscription.update({ 'can_renew': days_left <= 30, 'renew_url': reverse(SubscriptionRenewalView.urlname, args=[self.domain]), }) general_credits = CreditLine.get_credits_by_subscription_and_features(subscription) elif self.account is not None: general_credits 
= CreditLine.get_credits_for_account(self.account) if general_credits: general_credits = self._fmt_credit(self._credit_grand_total(general_credits)) info = { 'products': [self.get_product_summary(plan_version, self.account, subscription)], 'features': self.get_feature_summary(plan_version, self.account, subscription), 'general_credit': general_credits, 'css_class': "label-plan %s" % plan_version.plan.edition.lower(), 'do_not_invoice': subscription.do_not_invoice if subscription is not None else False, 'is_trial': subscription.is_trial if subscription is not None else False, 'date_start': (subscription.date_start.strftime(USER_DATE_FORMAT) if subscription is not None else None), 'date_end': date_end, 'cards': cards, 'next_subscription': next_subscription, } info.update(plan_version.user_facing_description) return info def _fmt_credit(self, credit_amount=None): if credit_amount is None: return { 'amount': "--", } return { 'amount': fmt_dollar_amount(credit_amount), 'is_visible': credit_amount != Decimal('0.0'), } def _credit_grand_total(self, credit_lines): return sum([c.balance for c in credit_lines]) if credit_lines else Decimal('0.00') def get_product_summary(self, plan_version, account, subscription): product_rates = plan_version.product_rates.all() if len(product_rates) > 1: # Models and UI are both written to support multiple products, # but for now, each subscription can only have one product. accounting_logger.error( "[BILLING] " "There seem to be multiple ACTIVE NEXT subscriptions for the subscriber %s. " "Odd, right? The latest one by date_created was used, but consider this an issue." 
% self.account ) product_rate = product_rates[0] product_info = { 'name': product_rate.product.product_type, 'monthly_fee': _("USD %s /month") % product_rate.monthly_fee, 'credit': None, 'type': product_rate.product.product_type, } credit_lines = None if subscription is not None: credit_lines = CreditLine.get_credits_by_subscription_and_features( subscription, product_type=product_rate.product.product_type ) elif account is not None: credit_lines = CreditLine.get_credits_for_account( account, product_type=product_rate.product.product_type ) if credit_lines: product_info['credit'] = self._fmt_credit(self._credit_grand_total(credit_lines)) return product_info def get_feature_summary(self, plan_version, account, subscription): feature_summary = [] for feature_rate in plan_version.feature_rates.all(): usage = FeatureUsageCalculator(feature_rate, self.domain).get_usage() feature_info = { 'name': get_feature_name(feature_rate.feature.feature_type, self.product), 'usage': usage, 'remaining': ( feature_rate.monthly_limit - usage if feature_rate.monthly_limit != -1 else _('Unlimited') ), 'credit': self._fmt_credit(), 'type': feature_rate.feature.feature_type, 'recurring_interval': get_feature_recurring_interval(feature_rate.feature.feature_type), } credit_lines = None if subscription is not None: credit_lines = CreditLine.get_credits_by_subscription_and_features( subscription, feature_type=feature_rate.feature.feature_type ) elif account is not None: credit_lines = CreditLine.get_credits_for_account( account, feature_type=feature_rate.feature.feature_type) if credit_lines: feature_info['credit'] = self._fmt_credit(self._credit_grand_total(credit_lines)) feature_summary.append(feature_info) return feature_summary @property def page_context(self): return { 'plan': self.plan, 'change_plan_url': reverse(SelectPlanView.urlname, args=[self.domain]), 'can_purchase_credits': self.can_purchase_credits, 'credit_card_url': reverse(CreditsStripePaymentView.urlname, args=[self.domain]), 
'wire_url': reverse(CreditsWireInvoiceView.urlname, args=[self.domain]), 'stripe_public_key': settings.STRIPE_PUBLIC_KEY, 'payment_error_messages': PAYMENT_ERROR_MESSAGES, 'sms_rate_calc_url': reverse(SMSRatesView.urlname, args=[self.domain]), 'user_email': self.request.couch_user.username, } class EditExistingBillingAccountView(DomainAccountingSettings, AsyncHandlerMixin): template_name = 'domain/update_billing_contact_info.html' urlname = 'domain_update_billing_info' page_title = ugettext_lazy("Billing Information") async_handlers = [ Select2BillingInfoHandler, ] @property @memoized def billing_info_form(self): if self.request.method == 'POST': return EditBillingAccountInfoForm( self.account, self.domain, self.request.couch_user.username, data=self.request.POST ) return EditBillingAccountInfoForm(self.account, self.domain, self.request.couch_user.username) def dispatch(self, request, *args, **kwargs): if self.account is None: raise Http404() return super(EditExistingBillingAccountView, self).dispatch(request, *args, **kwargs) @property def page_context(self): return { 'billing_account_info_form': self.billing_info_form, 'cards': self._get_cards(), 'stripe_public_key': settings.STRIPE_PUBLIC_KEY, 'card_base_url': reverse(CardsView.url_name, args=[self.domain]), } def _get_cards(self): user = self.request.user.username payment_method, new_payment_method = StripePaymentMethod.objects.get_or_create( web_user=user, method_type=PaymentMethodType.STRIPE, ) return payment_method.all_cards_serialized(self.account) def post(self, request, *args, **kwargs): if self.async_response is not None: return self.async_response if self.billing_info_form.is_valid(): is_saved = self.billing_info_form.save() if not is_saved: messages.error( request, _("It appears that there was an issue updating your contact information. " "We've been notified of the issue. 
Please try submitting again, and if the problem " "persists, please try in a few hours.")) else: messages.success( request, _("Billing contact information was successfully updated.") ) return HttpResponseRedirect(reverse(EditExistingBillingAccountView.urlname, args=[self.domain])) return self.get(request, *args, **kwargs) class DomainBillingStatementsView(DomainAccountingSettings, CRUDPaginatedViewMixin): template_name = 'domain/billing_statements.html' urlname = 'domain_billing_statements' page_title = ugettext_lazy("Billing Statements") limit_text = ugettext_lazy("statements per page") empty_notification = ugettext_lazy("No Billing Statements match the current criteria.") loading_message = ugettext_lazy("Loading statements...") @property def parameters(self): return self.request.POST if self.request.method == 'POST' else self.request.GET @property def stripe_cards(self): return get_customer_cards(self.account, self.request.user.username, self.domain) @property def show_hidden(self): if not self.request.user.is_superuser: return False return bool(self.request.POST.get('additionalData[show_hidden]')) @property def show_unpaid(self): try: return json.loads(self.request.POST.get('additionalData[show_unpaid]')) except TypeError: return False @property def invoices(self): invoices = Invoice.objects.filter(subscription__subscriber__domain=self.domain) if not self.show_hidden: invoices = invoices.filter(is_hidden=False) if self.show_unpaid: invoices = invoices.filter(date_paid__exact=None) return invoices.order_by('-date_start', '-date_end') @property def total(self): return self.paginated_invoices.count @property @memoized def paginated_invoices(self): return Paginator(self.invoices, self.limit) @property def total_balance(self): """ Returns the total balance of unpaid, unhidden invoices. Doesn't take into account the view settings on the page. 
""" invoices = (Invoice.objects .filter(subscription__subscriber__domain=self.domain) .filter(date_paid__exact=None) .filter(is_hidden=False)) return invoices.aggregate( total_balance=Sum('balance') ).get('total_balance') or 0.00 @property def column_names(self): return [ _("Statement No."), _("Plan"), _("Billing Period"), _("Date Due"), _("Payment Status"), _("PDF"), ] @property def page_context(self): pagination_context = self.pagination_context pagination_context.update({ 'stripe_public_key': settings.STRIPE_PUBLIC_KEY, 'payment_error_messages': PAYMENT_ERROR_MESSAGES, 'process_invoice_payment_url': reverse( InvoiceStripePaymentView.urlname, args=[self.domain], ), 'process_bulk_payment_url': reverse( BulkStripePaymentView.urlname, args=[self.domain], ), 'process_wire_invoice_url': reverse( WireInvoiceView.urlname, args=[self.domain], ), 'stripe_cards': self.stripe_cards, 'total_balance': self.total_balance, }) return pagination_context @property def can_pay_invoices(self): return self.request.couch_user.is_domain_admin(self.domain) @property def paginated_list(self): for invoice in self.paginated_invoices.page(self.page).object_list: try: last_billing_record = BillingRecord.objects.filter( invoice=invoice ).latest('date_created') if invoice.is_paid: payment_status = (_("Paid on %s.") % invoice.date_paid.strftime(USER_DATE_FORMAT)) payment_class = "label label-inverse" else: payment_status = _("Not Paid") payment_class = "label label-important" date_due = ( (invoice.date_due.strftime(USER_DATE_FORMAT) if not invoice.is_paid else _("Already Paid")) if invoice.date_due else _("None") ) yield { 'itemData': { 'id': invoice.id, 'invoice_number': invoice.invoice_number, 'start': invoice.date_start.strftime(USER_DATE_FORMAT), 'end': invoice.date_end.strftime(USER_DATE_FORMAT), 'plan': invoice.subscription.plan_version.user_facing_description, 'payment_status': payment_status, 'payment_class': payment_class, 'date_due': date_due, 'pdfUrl': reverse( 
BillingStatementPdfView.urlname, args=[self.domain, last_billing_record.pdf_data_id] ), 'canMakePayment': (not invoice.is_paid and self.can_pay_invoices), 'balance': "%s" % quantize_accounting_decimal(invoice.balance), }, 'template': 'statement-row-template', } except BillingRecord.DoesNotExist: logging.error( "An invoice was generated for %(invoice_id)d " "(domain: %(domain)s), but no billing record!" % { 'invoice_id': invoice.id, 'domain': self.domain, }) def refresh_item(self, item_id): pass def post(self, *args, **kwargs): return self.paginate_crud_response def dispatch(self, request, *args, **kwargs): if self.account is None: raise Http404() return super(DomainBillingStatementsView, self).dispatch(request, *args, **kwargs) class BaseStripePaymentView(DomainAccountingSettings): http_method_names = ['post'] @property def account(self): raise NotImplementedError("you must impmement the property account") @property @memoized def domain_admin(self): if self.request.couch_user.is_domain_admin(self.domain): return self.request.couch_user.username else: raise PaymentRequestError( "The logged in user was not a domain admin." ) def get_or_create_payment_method(self): return StripePaymentMethod.objects.get_or_create( web_user=self.domain_admin, method_type=PaymentMethodType.STRIPE, )[0] def get_payment_handler(self): """Returns a StripePaymentHandler object """ raise NotImplementedError("You must impmenent get_payment_handler()") def post(self, request, *args, **kwargs): try: payment_handler = self.get_payment_handler() response = payment_handler.process_request(request) except PaymentRequestError as e: accounting_logger.error( "[BILLING] Failed to process Stripe Payment due to bad " "request for domain %(domain)s user %(web_user)s: " "%(error)s" % { 'domain': self.domain, 'web_user': self.request.user.username, 'error': e, } ) response = { 'error': { 'message': _( "There was an issue processing your payment. No " "charges were made. 
We're looking into the issue " "as quickly as possible. Sorry for the inconvenience." ) } } return json_response(response) class CreditsStripePaymentView(BaseStripePaymentView): urlname = 'domain_credits_payment' @property @memoized def account(self): return BillingAccount.get_or_create_account_by_domain( self.domain, created_by=self.request.user.username, account_type=BillingAccountType.USER_CREATED, entry_point=EntryPoint.SELF_STARTED, )[0] def get_payment_handler(self): return CreditStripePaymentHandler( self.get_or_create_payment_method(), self.domain, self.account, subscription=Subscription.get_subscribed_plan_by_domain(self.domain_object)[1], post_data=self.request.POST.copy(), ) class CreditsWireInvoiceView(DomainAccountingSettings): http_method_names = ['post'] urlname = 'domain_wire_payment' @method_decorator(login_and_domain_required) def dispatch(self, request, *args, **kwargs): return super(CreditsWireInvoiceView, self).dispatch(request, *args, **kwargs) def post(self, request, *args, **kwargs): emails = request.POST.get('emails', []).split() amount = Decimal(request.POST.get('amount', 0)) wire_invoice_factory = DomainWireInvoiceFactory(request.domain, contact_emails=emails) try: wire_invoice_factory.create_wire_credits_invoice(self._get_items(request), amount) except Exception as e: return json_response({'error': {'message': str(e)}}) return json_response({'success': True}) def _get_items(self, request): product_type = SoftwareProductType.get_type_by_domain(Domain.get_by_name(self.domain)) features = [{'type': get_feature_name(feature_type[0], product_type), 'amount': Decimal(request.POST.get(feature_type[0], 0))} for feature_type in FeatureType.CHOICES if Decimal(request.POST.get(feature_type[0], 0)) > 0] products = [{'type': pt[0], 'amount': Decimal(request.POST.get(pt[0], 0))} for pt in SoftwareProductType.CHOICES if Decimal(request.POST.get(pt[0], 0)) > 0] return products + features class InvoiceStripePaymentView(BaseStripePaymentView): urlname = 
'domain_invoice_payment' @property @memoized def invoice(self): try: invoice_id = self.request.POST['invoice_id'] except IndexError: raise PaymentRequestError("invoice_id is required") try: return Invoice.objects.get(pk=invoice_id) except Invoice.DoesNotExist: raise PaymentRequestError( "Could not find a matching invoice for invoice_id '%s'" % invoice_id ) @property def account(self): return self.invoice.subscription.account def get_payment_handler(self): return InvoiceStripePaymentHandler( self.get_or_create_payment_method(), self.domain, self.invoice ) class BulkStripePaymentView(BaseStripePaymentView): urlname = 'domain_bulk_payment' @property def account(self): return BillingAccount.get_account_by_domain(self.domain) def get_payment_handler(self): return BulkStripePaymentHandler( self.get_or_create_payment_method(), self.domain ) class WireInvoiceView(View): http_method_names = ['post'] urlname = 'domain_wire_invoice' @method_decorator(login_and_domain_required) @method_decorator(domain_admin_required) def dispatch(self, request, *args, **kwargs): return super(WireInvoiceView, self).dispatch(request, *args, **kwargs) def post(self, request, *args, **kwargs): emails = request.POST.get('emails', []).split() balance = Decimal(request.POST.get('customPaymentAmount', 0)) wire_invoice_factory = DomainWireInvoiceFactory(request.domain, contact_emails=emails) try: wire_invoice_factory.create_wire_invoice(balance) except Exception, e: return json_response({'error': {'message', e}}) return json_response({'success': True}) class BillingStatementPdfView(View): urlname = 'domain_billing_statement_download' @method_decorator(login_and_domain_required) @method_decorator(domain_admin_required) def dispatch(self, request, *args, **kwargs): return super(BillingStatementPdfView, self).dispatch(request, *args, **kwargs) def get(self, request, *args, **kwargs): domain = args[0] statement_id = kwargs.get('statement_id') if statement_id is None or domain is None: raise Http404() try: 
invoice_pdf = InvoicePdf.get(statement_id) except ResourceNotFound: raise Http404() try: if invoice_pdf.is_wire: invoice = WireInvoice.objects.get( pk=invoice_pdf.invoice_id, domain=domain ) else: invoice = Invoice.objects.get( pk=invoice_pdf.invoice_id, subscription__subscriber__domain=domain ) except (Invoice.DoesNotExist, WireInvoice.DoesNotExist): raise Http404() if invoice.is_wire: edition = 'Bulk' else: edition = DESC_BY_EDITION[invoice.subscription.plan_version.plan.edition]['name'] filename = "%(pdf_id)s_%(domain)s_%(edition)s_%(filename)s" % { 'pdf_id': invoice_pdf._id, 'domain': domain, 'edition': edition, 'filename': invoice_pdf.get_filename(invoice), } try: data = invoice_pdf.get_data(invoice) response = HttpResponse(data, content_type='application/pdf') response['Content-Disposition'] = 'inline;filename="%s' % filename except Exception as e: logging.error('[Billing] Fetching invoice PDF failed: %s' % e) return HttpResponse(_("Could not obtain billing statement. " "An issue has been submitted.")) return response class InternalSubscriptionManagementView(BaseAdminProjectSettingsView): template_name = 'domain/internal_subscription_management.html' urlname = 'internal_subscription_mgmt' page_title = ugettext_lazy("Dimagi Internal Subscription Management") form_classes = INTERNAL_SUBSCRIPTION_MANAGEMENT_FORMS @method_decorator(require_superuser) def get(self, request, *args, **kwargs): return super(InternalSubscriptionManagementView, self).get(request, *args, **kwargs) @method_decorator(require_superuser) def post(self, request, *args, **kwargs): form = self.get_post_form if form.is_valid(): try: form.process_subscription_management() return HttpResponseRedirect(reverse(DomainSubscriptionView.urlname, args=[self.domain])) except NewSubscriptionError as e: messages.error(self.request, e.message) return self.get(request, *args, **kwargs) @property def page_context(self): return { 'plan_name': Subscription.get_subscribed_plan_by_domain(self.domain)[0], 
'select_subscription_type_form': self.select_subscription_type_form, 'subscription_management_forms': self.slug_to_form.values(), 'today': datetime.date.today(), } @property def get_post_form(self): return self.slug_to_form[self.request.POST.get('slug')] @property @memoized def slug_to_form(self): def create_form(form_class): if self.request.method == 'POST' and form_class.slug == self.request.POST.get('slug'): return form_class(self.domain, self.request.couch_user.username, self.request.POST) return form_class(self.domain, self.request.couch_user.username) return {form_class.slug: create_form(form_class) for form_class in self.form_classes} @property @memoized def select_subscription_type_form(self): if self.request.method == 'POST': for form_slug in self.slug_to_form: if form_slug in self.request.POST: return SelectSubscriptionTypeForm({ 'subscription_type': form_slug, }) subscription_type = None subscription = Subscription.get_subscribed_plan_by_domain(self.domain_object)[1] if subscription is None: subscription_type = None else: plan = subscription.plan_version.plan if subscription.service_type == SubscriptionType.CONTRACTED: subscription_type = "contracted_partner" elif plan.edition == SoftwarePlanEdition.ENTERPRISE: subscription_type = "dimagi_only_enterprise" elif (plan.edition == SoftwarePlanEdition.ADVANCED and plan.visibility == SoftwarePlanVisibility.TRIAL_INTERNAL): subscription_type = "advanced_extended_trial" return SelectSubscriptionTypeForm({'subscription_type': subscription_type}) class SelectPlanView(DomainAccountingSettings): template_name = 'domain/select_plan.html' urlname = 'domain_select_plan' page_title = ugettext_lazy("Change Plan") step_title = ugettext_lazy("Select Plan") edition = None lead_text = ugettext_lazy("Please select a plan below that fits your organization's needs.") @property def edition_name(self): if self.edition: return DESC_BY_EDITION[self.edition]['name'] @property def is_non_ops_superuser(self): if not 
self.request.couch_user.is_superuser: return False return not has_privilege(self.request, privileges.ACCOUNTING_ADMIN) @property def parent_pages(self): return [ { 'title': DomainSubscriptionView.page_title, 'url': reverse(DomainSubscriptionView.urlname, args=[self.domain]), } ] @property def steps(self): edition_name = u" (%s)" % self.edition_name if self.edition_name else "" return [ { 'title': _(u"1. Select a Plan%(edition_name)s") % { "edition_name": edition_name }, 'url': reverse(SelectPlanView.urlname, args=[self.domain]), } ] @property def main_context(self): context = super(SelectPlanView, self).main_context context.update({ 'steps': self.steps, 'step_title': self.step_title, 'lead_text': self.lead_text, }) return context @property def page_context(self): return { 'pricing_table': PricingTable.get_table_by_product(self.product, domain=self.domain), 'current_edition': (self.current_subscription.plan_version.plan.edition.lower() if self.current_subscription is not None and not self.current_subscription.is_trial else ""), 'is_non_ops_superuser': self.is_non_ops_superuser, } class EditPrivacySecurityView(BaseAdminProjectSettingsView): template_name = "domain/admin/project_privacy.html" urlname = "privacy_info" page_title = ugettext_lazy("Privacy and Security") @property @memoized def privacy_form(self): initial = { "secure_submissions": self.domain_object.secure_submissions, "restrict_superusers": self.domain_object.restrict_superusers, "allow_domain_requests": self.domain_object.allow_domain_requests, } if self.request.method == 'POST': return PrivacySecurityForm(self.request.POST, initial=initial) return PrivacySecurityForm(initial=initial) @property def page_context(self): return { 'privacy_form': self.privacy_form } def post(self, request, *args, **kwargs): if self.privacy_form.is_valid(): self.privacy_form.save(self.domain_object) messages.success(request, _("Your project settings have been saved!")) return self.get(request, *args, **kwargs) class 
SelectedEnterprisePlanView(SelectPlanView): template_name = 'domain/selected_enterprise_plan.html' urlname = 'enterprise_request_quote' step_title = ugettext_lazy("Contact Dimagi") edition = SoftwarePlanEdition.ENTERPRISE @property def steps(self): last_steps = super(SelectedEnterprisePlanView, self).steps last_steps.append({ 'title': _("2. Contact Dimagi"), 'url': reverse(SelectedEnterprisePlanView.urlname, args=[self.domain]), }) return last_steps @property @memoized def is_not_redirect(self): return not 'plan_edition' in self.request.POST @property @memoized def enterprise_contact_form(self): if self.request.method == 'POST' and self.is_not_redirect: return EnterprisePlanContactForm(self.domain, self.request.couch_user, data=self.request.POST) return EnterprisePlanContactForm(self.domain, self.request.couch_user) @property def page_context(self): return { 'enterprise_contact_form': self.enterprise_contact_form, } def post(self, request, *args, **kwargs): if self.is_not_redirect and self.enterprise_contact_form.is_valid(): self.enterprise_contact_form.send_message() messages.success(request, _("Your request was sent to Dimagi. " "We will try our best to follow up in a timely manner.")) return HttpResponseRedirect(reverse(DomainSubscriptionView.urlname, args=[self.domain])) return self.get(request, *args, **kwargs) class ConfirmSelectedPlanView(SelectPlanView): template_name = 'domain/confirm_plan.html' urlname = 'confirm_selected_plan' step_title = ugettext_lazy("Confirm Plan") @property def steps(self): last_steps = super(ConfirmSelectedPlanView, self).steps last_steps.append({ 'title': _("2. 
Confirm Plan"), 'url': reverse(SelectPlanView.urlname, args=[self.domain]), }) return last_steps @property @memoized def edition(self): edition = self.request.POST.get('plan_edition').title() if edition not in [e[0] for e in SoftwarePlanEdition.CHOICES]: raise Http404() return edition @property @memoized def selected_plan_version(self): return DefaultProductPlan.get_default_plan_by_domain(self.domain, self.edition).plan.get_version() @property def downgrade_messages(self): current_plan_version, subscription = Subscription.get_subscribed_plan_by_domain(self.domain_object) if subscription is None: current_plan_version = None downgrades = get_change_status(current_plan_version, self.selected_plan_version)[1] downgrade_handler = DomainDowngradeStatusHandler( self.domain_object, self.selected_plan_version, downgrades, web_user=self.request.user.username ) return downgrade_handler.get_response() @property def page_context(self): return { 'downgrade_messages': self.downgrade_messages, 'current_plan': (self.current_subscription.plan_version.user_facing_description if self.current_subscription is not None else None), 'show_community_notice': (self.edition == SoftwarePlanEdition.COMMUNITY and self.current_subscription is None), } @property def main_context(self): context = super(ConfirmSelectedPlanView, self).main_context context.update({ 'plan': self.selected_plan_version.user_facing_description, }) return context def get(self, request, *args, **kwargs): return HttpResponseRedirect(reverse(SelectPlanView.urlname, args=[self.domain])) def post(self, request, *args, **kwargs): if self.edition == SoftwarePlanEdition.ENTERPRISE and not self.request.couch_user.is_superuser: return HttpResponseRedirect(reverse(SelectedEnterprisePlanView.urlname, args=[self.domain])) return super(ConfirmSelectedPlanView, self).get(request, *args, **kwargs) class ConfirmBillingAccountInfoView(ConfirmSelectedPlanView, AsyncHandlerMixin): template_name = 'domain/confirm_billing_info.html' urlname = 
'confirm_billing_account_info'
    step_title = ugettext_lazy("Confirm Billing Information")
    is_new = False  # set True when get_or_create creates the account below
    async_handlers = [
        Select2BillingInfoHandler,
    ]

    @property
    def steps(self):
        last_steps = super(ConfirmBillingAccountInfoView, self).steps
        last_steps.append({
            'title': _("3. Confirm Billing Account"),
            'url': reverse(ConfirmBillingAccountInfoView.urlname, args=[self.domain]),
        })
        return last_steps

    @property
    @memoized
    def account(self):
        # Reuse the subscription's account when one exists; otherwise
        # lazily create a self-started, user-created billing account.
        if self.current_subscription:
            return self.current_subscription.account
        account, self.is_new = BillingAccount.get_or_create_account_by_domain(
            self.domain,
            created_by=self.request.couch_user.username,
            account_type=BillingAccountType.USER_CREATED,
            entry_point=EntryPoint.SELF_STARTED,
        )
        return account

    @property
    def payment_method(self):
        # One Stripe payment method per web user, created on demand.
        user = self.request.user.username
        payment_method, __ = StripePaymentMethod.objects.get_or_create(
            web_user=user,
            method_type=PaymentMethodType.STRIPE,
        )
        return payment_method

    @property
    @memoized
    def is_form_post(self):
        # Distinguishes the billing-info submission from the redirect POST
        # coming out of the plan-confirmation step.
        return 'company_name' in self.request.POST

    @property
    @memoized
    def billing_account_info_form(self):
        initial = None
        # Superusers subscribing a domain to Enterprise get Dimagi's own
        # billing address prefilled.
        if self.edition == SoftwarePlanEdition.ENTERPRISE and self.request.couch_user.is_superuser:
            initial = {
                'company_name': "Dimagi",
                'first_line': "585 Massachusetts Ave",
                'second_line': "Suite 4",
                'city': "Cambridge",
                'state_province_region': "MA",
                'postal_code': "02139",
                'country': "US",
            }
        if self.request.method == 'POST' and self.is_form_post:
            return ConfirmNewSubscriptionForm(
                self.account, self.domain, self.request.couch_user.username,
                self.selected_plan_version, self.current_subscription,
                data=self.request.POST, initial=initial
            )
        return ConfirmNewSubscriptionForm(self.account, self.domain, self.request.couch_user.username,
                                          self.selected_plan_version, self.current_subscription,
                                          initial=initial)

    @property
    def page_context(self):
        return {
            'billing_account_info_form': self.billing_account_info_form,
            'stripe_public_key': settings.STRIPE_PUBLIC_KEY,
            'cards': self.payment_method.all_cards_serialized(self.account)
        }

    def post(self, request, *args, **kwargs):
        # Order matters: async select2 handlers first, then the enterprise
        # redirect guard, then the actual subscription save.
        if self.async_response is not None:
            return self.async_response
        if self.edition == SoftwarePlanEdition.ENTERPRISE and not self.request.couch_user.is_superuser:
            return HttpResponseRedirect(reverse(SelectedEnterprisePlanView.urlname, args=[self.domain]))
        if self.is_form_post and self.billing_account_info_form.is_valid():
            is_saved = self.billing_account_info_form.save()
            software_plan_name = DESC_BY_EDITION[self.selected_plan_version.plan.edition]['name'].encode('utf-8')
            if not is_saved:
                messages.error(
                    request, _("It appears there was an issue subscribing your project to the %s Software Plan. You "
                               "may try resubmitting, but if that doesn't work, rest assured someone will be "
                               "contacting you shortly.") % software_plan_name)
            else:
                messages.success(
                    request, _("Your project has been successfully subscribed to the %s Software Plan."
                               % software_plan_name)
                )
                return HttpResponseRedirect(reverse(DomainSubscriptionView.urlname, args=[self.domain]))
        return super(ConfirmBillingAccountInfoView, self).post(request, *args, **kwargs)


class SubscriptionMixin(object):
    # Provides the not-yet-renewed active subscription, 404ing otherwise.

    @property
    @memoized
    def subscription(self):
        subscription = Subscription.get_subscribed_plan_by_domain(self.domain_object)[1]
        if subscription is None:
            raise Http404
        if subscription.is_renewed:
            raise Http404
        return subscription


class SubscriptionRenewalView(SelectPlanView, SubscriptionMixin):
    urlname = "domain_subscription_renewal"
    page_title = ugettext_lazy("Renew Plan")
    step_title = ugettext_lazy("Renew or Change Plan")

    @property
    def lead_text(self):
        # NOTE(review): .format() runs before ugettext_lazy wraps the
        # result, so the interpolated string is what gets looked up for
        # translation — presumably unintended; confirm before changing.
        return ugettext_lazy("Based on your current usage we recommend you use the <strong>{plan}</strong> plan"
                             .format(plan=self.current_subscription.plan_version.plan.edition))

    @property
    def main_context(self):
        context = super(SubscriptionRenewalView, self).main_context
        context.update({'is_renewal': True})
        return context

    @property
    def page_context(self):
        context =
super(SubscriptionRenewalView, self).page_context
        # Recommend the cheapest edition that still covers the privileges
        # granted by the current plan.
        current_privs = get_privileges(self.subscription.plan_version)
        plan = DefaultProductPlan.get_lowest_edition_by_domain(
            self.domain, current_privs, return_plan=False,
        ).lower()

        context['current_edition'] = (plan
                                      if self.current_subscription is not None
                                      and not self.current_subscription.is_trial
                                      else "")
        return context


class ConfirmSubscriptionRenewalView(DomainAccountingSettings, AsyncHandlerMixin, SubscriptionMixin):
    """Final confirmation step when renewing a domain's subscription."""
    template_name = 'domain/confirm_subscription_renewal.html'
    urlname = 'domain_subscription_renewal_confirmation'
    page_title = ugettext_lazy("Renew Plan")
    async_handlers = [
        Select2BillingInfoHandler,
    ]

    @property
    @memoized
    def next_plan_version(self):
        # NOTE(review): POST.get('plan_edition') may be None, which would
        # raise AttributeError on .title() rather than 404 — confirm intent.
        new_edition = self.request.POST.get('plan_edition').title()
        plan_version = DefaultProductPlan.get_default_plan_by_domain(self.domain, new_edition)
        if plan_version is None:
            logging.error("[BILLING] Could not find a matching renewable plan "
                          "for %(domain)s, subscription number %(sub_pk)s."
                          % {
                              'domain': self.domain,
                              'sub_pk': self.subscription.pk
                          })
            raise Http404
        return plan_version

    @property
    @memoized
    def confirm_form(self):
        # 'from_plan_page' marks the redirect POST from the renewal page,
        # which should render an unbound form.
        if self.request.method == 'POST' and "from_plan_page" not in self.request.POST:
            return ConfirmSubscriptionRenewalForm(
                self.account, self.domain, self.request.couch_user.username,
                self.subscription, self.next_plan_version,
                data=self.request.POST,
            )
        return ConfirmSubscriptionRenewalForm(
            self.account, self.domain, self.request.couch_user.username,
            self.subscription, self.next_plan_version,
        )

    @property
    def page_context(self):
        return {
            'subscription': self.subscription,
            'plan': self.subscription.plan_version.user_facing_description,
            'confirm_form': self.confirm_form,
            'next_plan': self.next_plan_version.user_facing_description,
        }

    def post(self, request, *args, **kwargs):
        if self.async_response is not None:
            return self.async_response
        if self.confirm_form.is_valid():
            is_saved = self.confirm_form.save()
            if not is_saved:
                messages.error(
                    request, _(
                        "There was an issue renewing your subscription. We "
                        "have been notified of the issue. Please try "
                        "submitting again, and if the problem persists, "
                        "please try in a few hours."
                    )
                )
            else:
                messages.success(
                    request, _("Your subscription was successfully renewed!")
                )
                return HttpResponseRedirect(
                    reverse(DomainSubscriptionView.urlname, args=[self.domain])
                )
        return self.get(request, *args, **kwargs)


class ExchangeSnapshotsView(BaseAdminProjectSettingsView):
    """Lists this project's Exchange snapshots and the published one."""
    template_name = 'domain/snapshot_settings.html'
    urlname = 'domain_snapshot_settings'
    page_title = ugettext_lazy("CommCare Exchange")

    @property
    def page_context(self):
        return {
            'project': self.domain_object,
            'snapshots': list(self.domain_object.snapshots()),
            'published_snapshot': self.domain_object.published_snapshot(),
        }


class CreateNewExchangeSnapshotView(BaseAdminProjectSettingsView):
    """Creates and optionally publishes a new Exchange snapshot of the project."""
    template_name = 'domain/create_snapshot.html'
    urlname = 'domain_create_snapshot'
    page_title = ugettext_lazy("Publish New Version")
    strict_domain_fetching = True

    @property
    def parent_pages(self):
        return [{
            'title': ExchangeSnapshotsView.page_title,
            'url': reverse(ExchangeSnapshotsView.urlname, args=[self.domain]),
        }]

    @property
    def page_context(self):
        context = {
            'form': self.snapshot_settings_form,
            'app_forms': self.app_forms,
            'fixture_forms': self.fixture_forms,
            'can_publish_as_org': self.can_publish_as_org,
            'autocomplete_fields': ('project_type', 'phone_model', 'user_type', 'city', 'countries', 'region'),
        }
        if self.published_snapshot:
            context.update({
                'published_as_org': self.published_snapshot.publisher == 'organization',
                'author': self.published_snapshot.author,
            })
        elif self.request.method == 'POST':
            context.update({
                'published_as_org': self.request.POST.get('publisher', '') == 'organization',
                'author': self.request.POST.get('author', '')
            })
        return context

    @property
    def can_publish_as_org(self):
        return (self.domain_object.get_organization()
                and self.request.couch_user.is_org_admin(self.domain_object.get_organization().name))

    @property
    @memoized
    def snapshots(self):
        return list(self.domain_object.snapshots())

    @property
    @memoized
    def published_snapshot(self):
        # Falls back to the live domain itself when nothing is published yet.
        return self.snapshots[0] if self.snapshots else
self.domain_object

    @property
    @memoized
    def published_apps(self):
        """Map of base (pre-copy) app id -> published application."""
        published_apps = {}
        if self.published_snapshot:
            for app in self.published_snapshot.full_applications():
                # When the "snapshot" is the live domain, apps point at
                # their own copy_of; otherwise follow copied_from.
                base_app_id = app.copy_of if self.domain_object == self.published_snapshot else app.copied_from.copy_of
                if base_app_id:
                    published_apps[base_app_id] = app
        return published_apps

    @property
    def app_forms(self):
        # One SnapshotApplicationForm per app, prefilled from the previously
        # published version of the same app when available.
        app_forms = []
        for app in self.domain_object.applications():
            if self.request.method == 'POST':
                app_forms.append((app, SnapshotApplicationForm(self.request.POST, prefix=app.id)))
            elif self.published_snapshot and app.copy_of in self.published_apps:
                original = self.published_apps[app.copy_of]
                app_forms.append((app, SnapshotApplicationForm(initial={
                    'publish': True,
                    'name': original.name,
                    'description': original.description,
                    'deployment_date': original.deployment_date,
                    'user_type': original.user_type,
                    'attribution_notes': original.attribution_notes,
                    'phone_model': original.phone_model,
                }, prefix=app.id)))
            else:
                app_forms.append((app, SnapshotApplicationForm(
                    initial={
                        'publish': (self.published_snapshot is None
                                    or self.published_snapshot == self.domain_object)
                    }, prefix=app.id)))
        return app_forms

    @property
    @memoized
    def published_fixtures(self):
        return [f.copy_from for f in FixtureDataType.by_domain(self.published_snapshot._id)]

    @property
    def fixture_forms(self):
        fixture_forms = []
        for fixture in FixtureDataType.by_domain(self.domain_object.name):
            fixture.id = fixture._id
            if self.request.method == 'POST':
                fixture_forms.append((fixture, SnapshotFixtureForm(self.request.POST, prefix=fixture._id)))
            else:
                fixture_forms.append((fixture, SnapshotFixtureForm(
                    initial={
                        'publish': (self.published_snapshot == self.domain_object
                                    or fixture._id in self.published_fixtures)
                    }, prefix=fixture._id)))
        return fixture_forms

    @property
    @memoized
    def snapshot_settings_form(self):
        if self.request.method == 'POST':
            form = SnapshotSettingsForm(self.request.POST, self.request.FILES,
                                        domain=self.domain_object,
                                        is_superuser=self.request.user.is_superuser)
            return form

        proj = self.published_snapshot if self.published_snapshot else self.domain_object
        initial = {
            'case_sharing': json.dumps(proj.case_sharing),
            'publish_on_submit': True,
            'share_multimedia': self.published_snapshot.multimedia_included if self.published_snapshot else True,
        }
        init_attribs = ['default_timezone', 'project_type', 'license']
        if self.published_snapshot:
            init_attribs.extend(['title', 'description', 'short_description'])
            if self.published_snapshot.yt_id:
                initial['video'] = 'http://www.youtube.com/watch?v=%s' % self.published_snapshot.yt_id
        for attr in init_attribs:
            initial[attr] = getattr(proj, attr)

        return SnapshotSettingsForm(initial=initial,
                                    domain=self.domain_object,
                                    is_superuser=self.request.user.is_superuser)

    @property
    @memoized
    def has_published_apps(self):
        # Side effect: queues an error message when nothing was selected.
        for app in self.domain_object.applications():
            if self.request.POST.get("%s-publish" % app.id, False):
                return True
        messages.error(self.request, _("Cannot publish a project without applications to CommCare Exchange"))
        return False

    @property
    def has_signed_eula(self):
        eula_signed = self.request.couch_user.is_eula_signed()
        if not eula_signed:
            messages.error(self.request, _("You must agree to our eula to publish a project to Exchange"))
        return eula_signed

    @property
    def has_valid_form(self):
        is_valid = self.snapshot_settings_form.is_valid()
        if not is_valid:
            messages.error(self.request, _("There are some problems with your form. "
                                           "Please address these issues and try again."))
        return is_valid

    def post(self, request, *args, **kwargs):
        # Validates, creates the snapshot copy, transfers form data onto it,
        # shares multimedia/fixtures, and optionally publishes.
        if self.has_published_apps and self.has_signed_eula and self.has_valid_form:
            new_license = request.POST['license']
            if request.POST.get('share_multimedia', False):
                app_ids = self.snapshot_settings_form._get_apps_to_publish()
                media = self.domain_object.all_media(from_apps=app_ids)
                for m_file in media:
                    if self.domain not in m_file.shared_by:
                        m_file.shared_by.append(self.domain)

                    # set the license of every multimedia file that doesn't yet have a license set
                    if not m_file.license:
                        m_file.update_or_add_license(self.domain, type=new_license, should_save=False)

                    m_file.save()

            if not request.POST.get('share_reminders', False):
                share_reminders = False
            else:
                share_reminders = True

            # "<app_id>-publish" checkboxes select which docs to copy.
            copy_by_id = set()
            for k in request.POST.keys():
                if k.endswith("-publish"):
                    copy_by_id.add(k[:-len("-publish")])

            old = self.domain_object.published_snapshot()
            new_domain = self.domain_object.save_snapshot(
                share_reminders=share_reminders, copy_by_id=copy_by_id)
            new_domain.license = new_license
            new_domain.description = request.POST['description']
            new_domain.short_description = request.POST['short_description']
            new_domain.project_type = request.POST['project_type']
            new_domain.title = request.POST['title']
            new_domain.multimedia_included = request.POST.get('share_multimedia', '') == 'on'
            new_domain.publisher = request.POST.get('publisher', None) or 'user'
            if request.POST.get('video'):
                new_domain.yt_id = self.snapshot_settings_form.cleaned_data['video']

            new_domain.author = request.POST.get('author', None)

            new_domain.is_approved = False
            new_domain.is_starter_app = request.POST.get('is_starter_app', '') == 'on'
            publish_on_submit = request.POST.get('publish_on_submit', "no") == "yes"

            image = self.snapshot_settings_form.cleaned_data['image']
            if image:
                new_domain.image_path = image.name
                new_domain.image_type = image.content_type
            elif request.POST.get('old_image', False):
                # Carry the previously-published snapshot's image forward.
                new_domain.image_path = old.image_path
                new_domain.image_type = old.image_type
            new_domain.save()

            documentation_file = self.snapshot_settings_form.cleaned_data['documentation_file']
            if documentation_file:
                new_domain.documentation_file_path = documentation_file.name
                new_domain.documentation_file_type = documentation_file.content_type
            elif request.POST.get('old_documentation_file', False):
                new_domain.documentation_file_path = old.documentation_file_path
                new_domain.documentation_file_type = old.documentation_file_type
            new_domain.save()

            if publish_on_submit:
                _publish_snapshot(request, self.domain_object, published_snapshot=new_domain)
            else:
                new_domain.published = False
                new_domain.save()

            if image:
                # Thumbnail the uploaded image to 200x200 and attach it.
                im = Image.open(image)
                out = cStringIO.StringIO()
                im.thumbnail((200, 200), Image.ANTIALIAS)
                im.save(out, new_domain.image_type.split('/')[-1])
                new_domain.put_attachment(content=out.getvalue(), name=image.name)
            elif request.POST.get('old_image', False):
                new_domain.put_attachment(content=old.fetch_attachment(old.image_path), name=new_domain.image_path)

            if documentation_file:
                new_domain.put_attachment(content=documentation_file, name=documentation_file.name)
            elif request.POST.get('old_documentation_file', False):
                new_domain.put_attachment(content=old.fetch_attachment(old.documentation_file_path),
                                          name=new_domain.documentation_file_path)

            # Copy per-app form fields onto the snapshot's copied apps,
            # keyed by the original app's id.
            for application in new_domain.full_applications():
                original_id = application.copied_from._id
                name_field = "%s-name" % original_id
                if name_field not in request.POST:
                    continue

                application.name = request.POST[name_field]
                application.description = request.POST["%s-description" % original_id]
                date_picked = request.POST["%s-deployment_date" % original_id]
                try:
                    date_picked = dateutil.parser.parse(date_picked)
                    if date_picked.year > 2009:
                        application.deployment_date = date_picked
                except Exception:
                    # unparseable/ancient dates are simply not recorded
                    pass
                application.phone_model = request.POST["%s-phone_model" % original_id]
                application.attribution_notes = request.POST["%s-attribution_notes" % original_id]
                application.user_type = request.POST["%s-user_type" % original_id]

                if not new_domain.multimedia_included:
                    application.multimedia_map = {}
                application.save()

            # Copy fixture descriptions, matched by tag back to the source.
            for fixture in FixtureDataType.by_domain(new_domain.name):
                old_id = FixtureDataType.by_domain_tag(self.domain_object.name,
                                                      fixture.tag).first()._id
                fixture.description = request.POST["%s-description" % old_id]
                fixture.save()

            if new_domain is None:
                messages.error(request, _("Version creation failed; please try again"))
            else:
                messages.success(request, (_("Created a new version of your app. This version will be posted to "
                                             "CommCare Exchange pending approval by admins.") if publish_on_submit
                                           else _("Created a new version of your app.")))
                return redirect(ExchangeSnapshotsView.urlname, self.domain)
        return self.get(request, *args, **kwargs)


class ManageProjectMediaView(BaseAdminProjectSettingsView):
    """Manage sharing/licensing/tagging of the project's multimedia files."""
    urlname = 'domain_manage_multimedia'
    page_title = ugettext_lazy("Multimedia Sharing")
    template_name = 'domain/admin/media_manager.html'

    @property
    def project_media_data(self):
        return [{
            'license': m.license.type if m.license else 'public',
            'shared': self.domain in m.shared_by,
            'url': m.url(),
            'm_id': m._id,
            'tags': m.tags.get(self.domain, []),
            'type': m.doc_type,
        } for m in self.request.project.all_media()]

    @property
    def page_context(self):
        return {
            'media': self.project_media_data,
            'licenses': LICENSES.items(),
        }

    @retry_resource(3)
    def post(self, request, *args, **kwargs):
        # Apply tag/share/license updates per media file from form fields
        # named "<id>_tags", "<id>_shared", "<id>_license".
        for m_file in request.project.all_media():
            if '%s_tags' % m_file._id in request.POST:
                m_file.tags[self.domain] = request.POST.get('%s_tags' % m_file._id, '').split(' ')

            if self.domain not in m_file.shared_by and request.POST.get('%s_shared' % m_file._id, False):
                m_file.shared_by.append(self.domain)
            elif self.domain in m_file.shared_by and not request.POST.get('%s_shared' % m_file._id, False):
                m_file.shared_by.remove(self.domain)

            if '%s_license' % m_file._id in request.POST:
                m_file.update_or_add_license(self.domain,
                                             type=request.POST.get('%s_license' % m_file._id, 'public'),
should_save=True)
            m_file.save()
        messages.success(request, _("Multimedia updated successfully!"))
        return self.get(request, *args, **kwargs)


class RepeaterMixin(object):
    # Human-readable labels for the repeater classes used below.

    @property
    def friendly_repeater_names(self):
        return {
            'FormRepeater': _("Forms"),
            'CaseRepeater': _("Cases"),
            'ShortFormRepeater': _("Form Stubs"),
            'AppStructureRepeater': _("App Schema Changes"),
        }


class DomainForwardingOptionsView(BaseAdminProjectSettingsView, RepeaterMixin):
    """Lists the domain's configured data-forwarding repeaters."""
    urlname = 'domain_forwarding'
    page_title = ugettext_lazy("Data Forwarding")
    template_name = 'domain/admin/domain_forwarding.html'

    @property
    def repeaters(self):
        available_repeaters = [
            FormRepeater, CaseRepeater, ShortFormRepeater, AppStructureRepeater,
        ]
        # (class name, instances for this domain, friendly label) triples
        return [(r.__name__, r.by_domain(self.domain), self.friendly_repeater_names[r.__name__])
                for r in available_repeaters]

    @property
    def page_context(self):
        return {
            'repeaters': self.repeaters,
            'pending_record_count': RepeatRecord.count(self.domain),
        }


class AddRepeaterView(BaseAdminProjectSettingsView, RepeaterMixin):
    """Configure a new repeater of the type given in the URL."""
    urlname = 'add_repeater'
    page_title = ugettext_lazy("Forward Data")
    template_name = 'domain/admin/add_form_repeater.html'
    repeater_form_class = GenericRepeaterForm

    @property
    def page_url(self):
        return reverse(self.urlname, args=[self.domain, self.repeater_type])

    @property
    def parent_pages(self):
        return [{
            'title': DomainForwardingOptionsView.page_title,
            'url': reverse(DomainForwardingOptionsView.urlname, args=[self.domain]),
        }]

    @property
    def repeater_type(self):
        return self.kwargs['repeater_type']

    @property
    def page_name(self):
        return "Forward %s" % self.friendly_repeater_names.get(self.repeater_type, "Data")

    @property
    @memoized
    def repeater_class(self):
        # Unknown repeater_type in the URL -> 404.
        try:
            return receiverwrapper.models.repeater_types[self.repeater_type]
        except KeyError:
            raise Http404()

    @property
    @memoized
    def add_repeater_form(self):
        if self.request.method == 'POST':
            return self.repeater_form_class(
                self.request.POST,
                domain=self.domain,
                repeater_class=self.repeater_class
            )
        return self.repeater_form_class(
            domain=self.domain,
            repeater_class=self.repeater_class
        )

    @property
    def page_context(self):
        return {
            'form': self.add_repeater_form,
            'repeater_type': self.repeater_type,
        }

    def make_repeater(self):
        """Build (but do not save) a repeater from the validated form."""
        repeater = self.repeater_class(
            domain=self.domain,
            url=self.add_repeater_form.cleaned_data['url'],
            use_basic_auth=self.add_repeater_form.cleaned_data['use_basic_auth'],
            username=self.add_repeater_form.cleaned_data['username'],
            password=self.add_repeater_form.cleaned_data['password'],
            format=self.add_repeater_form.cleaned_data['format']
        )
        return repeater

    def post(self, request, *args, **kwargs):
        if self.add_repeater_form.is_valid():
            repeater = self.make_repeater()
            repeater.save()
            messages.success(request, _("Forwarding set up to %s" % repeater.url))
            return HttpResponseRedirect(reverse(DomainForwardingOptionsView.urlname, args=[self.domain]))
        return self.get(request, *args, **kwargs)


class AddFormRepeaterView(AddRepeaterView):
    urlname = 'add_form_repeater'
    repeater_form_class = FormRepeaterForm

    @property
    def page_url(self):
        return reverse(self.urlname, args=[self.domain])

    def make_repeater(self):
        # Extends the generic repeater with form-specific options.
        repeater = super(AddFormRepeaterView, self).make_repeater()
        repeater.exclude_device_reports = self.add_repeater_form.cleaned_data['exclude_device_reports']
        repeater.include_app_id_param = self.add_repeater_form.cleaned_data['include_app_id_param']
        return repeater


class OrgSettingsView(BaseAdminProjectSettingsView):
    """Organization membership view for the project (cross-project reports)."""
    template_name = 'domain/orgs_settings.html'
    urlname = 'domain_org_settings'
    page_title = ugettext_lazy("Organization")

    @method_decorator(requires_privilege_with_fallback(privileges.CROSS_PROJECT_REPORTS))
    def dispatch(self, request, *args, **kwargs):
        return super(OrgSettingsView, self).dispatch(request, *args, **kwargs)

    @property
    def page_context(self):
        domain = self.domain_object
        org_users = []
        teams = Team.get_by_domain(domain.name)
        for team in teams:
            for user in team.get_members():
                # annotate each member with its team for the template
                user.team_id = team.get_id
                user.team = team.name
org_users.append(user) for user in org_users: user.current_domain = domain.name all_orgs = Organization.get_all() return { "project": domain, 'domain': domain.name, "organization": Organization.get_by_name(getattr(domain, "organization", None)), "org_users": org_users, "all_orgs": all_orgs, } class BaseInternalDomainSettingsView(BaseProjectSettingsView): strict_domain_fetching = True @method_decorator(login_and_domain_required) @method_decorator(require_superuser) def dispatch(self, request, *args, **kwargs): return super(BaseInternalDomainSettingsView, self).dispatch(request, *args, **kwargs) @property def main_context(self): context = super(BaseInternalDomainSettingsView, self).main_context context.update({ 'project': self.domain_object, }) return context @property def page_name(self): return mark_safe("%s <small>Internal</small>" % self.page_title) class EditInternalDomainInfoView(BaseInternalDomainSettingsView): urlname = 'domain_internal_settings' page_title = ugettext_lazy("Project Information") template_name = 'domain/internal_settings.html' strict_domain_fetching = True @property def autocomplete_fields(self): return ['countries'] @property @memoized def internal_settings_form(self): can_edit_eula = CAN_EDIT_EULA.enabled(self.request.couch_user.username) if self.request.method == 'POST': return DomainInternalForm(can_edit_eula, self.request.POST) initial = { 'deployment_date': self.domain_object.deployment.date.date if self.domain_object.deployment.date else '', 'countries': self.domain_object.deployment.countries, 'is_test': self.domain_object.is_test, } internal_attrs = [ 'sf_contract_id', 'sf_account_id', 'services', 'initiative', 'self_started', 'area', 'sub_area', 'organization_name', 'notes', 'phone_model', 'commtrack_domain', 'business_unit', 'workshop_region', ] if can_edit_eula: internal_attrs += [ 'custom_eula', 'can_use_data', ] for attr in internal_attrs: val = getattr(self.domain_object.internal, attr) if isinstance(val, bool): val = 'true' if 
val else 'false' initial[attr] = val return DomainInternalForm(can_edit_eula, initial=initial) @property def page_context(self): return { 'project': self.domain_object, 'form': self.internal_settings_form, 'areas': dict([(a["name"], a["sub_areas"]) for a in settings.INTERNAL_DATA["area"]]), } def post(self, request, *args, **kwargs): if self.internal_settings_form.is_valid(): old_attrs = copy.copy(self.domain_object.internal) self.internal_settings_form.save(self.domain_object) eula_props_changed = (bool(old_attrs.custom_eula) != bool(self.domain_object.internal.custom_eula) or bool(old_attrs.can_use_data) != bool(self.domain_object.internal.can_use_data)) if eula_props_changed and settings.EULA_CHANGE_EMAIL: message = '\n'.join([ '{user} changed either the EULA or data sharing properties for domain {domain}.', '', 'The properties changed were:', '- Custom eula: {eula_old} --> {eula_new}', '- Can use data: {can_use_data_old} --> {can_use_data_new}' ]).format( user=self.request.couch_user.username, domain=self.domain, eula_old=old_attrs.custom_eula, eula_new=self.domain_object.internal.custom_eula, can_use_data_old=old_attrs.can_use_data, can_use_data_new=self.domain_object.internal.can_use_data, ) send_mail_async.delay( 'Custom EULA or data use flags changed for {}'.format(self.domain), message, settings.DEFAULT_FROM_EMAIL, [settings.EULA_CHANGE_EMAIL] ) messages.success(request, _("The internal information for project %s was successfully updated!") % self.domain) else: messages.error(request, _( "Your settings are not valid, see below for errors. 
Correct them and try again!")) return self.get(request, *args, **kwargs) class EditInternalCalculationsView(BaseInternalDomainSettingsView): urlname = 'domain_internal_calculations' page_title = ugettext_lazy("Calculated Properties") template_name = 'domain/internal_calculations.html' @property def page_context(self): return { 'calcs': CALCS, 'order': CALC_ORDER, } @login_and_domain_required @require_superuser def calculated_properties(request, domain): calc_tag = request.GET.get("calc_tag", '').split('--') extra_arg = calc_tag[1] if len(calc_tag) > 1 else '' calc_tag = calc_tag[0] if not calc_tag or calc_tag not in CALC_FNS.keys(): data = {"error": 'This tag does not exist'} else: data = {"value": dom_calc(calc_tag, domain, extra_arg)} return json_response(data) def _publish_snapshot(request, domain, published_snapshot=None): snapshots = domain.snapshots() for snapshot in snapshots: if snapshot.published: snapshot.published = False if not published_snapshot or snapshot.name != published_snapshot.name: snapshot.save() if published_snapshot: if published_snapshot.copied_from.name != domain.name: messages.error(request, "Invalid snapshot") return False # cda stuff. 
In order to publish a snapshot, a user must have agreed to this published_snapshot.cda.signed = True published_snapshot.cda.date = datetime.datetime.utcnow() published_snapshot.cda.type = 'Content Distribution Agreement' if request.couch_user: published_snapshot.cda.user_id = request.couch_user.get_id published_snapshot.cda.user_ip = get_ip(request) published_snapshot.published = True published_snapshot.save() _notification_email_on_publish(domain, published_snapshot, request.couch_user) return True def _notification_email_on_publish(domain, snapshot, published_by): params = {"domain": domain, "snapshot": snapshot, "published_by": published_by, "url_base": get_site_domain()} text_content = render_to_string( "domain/email/published_app_notification.txt", params) html_content = render_to_string( "domain/email/published_app_notification.html", params) recipients = settings.EXCHANGE_NOTIFICATION_RECIPIENTS subject = "New App on Exchange: %s" % snapshot.title try: for recipient in recipients: send_html_email_async.delay(subject, recipient, html_content, text_content=text_content, email_from=settings.DEFAULT_FROM_EMAIL) except Exception: logging.warning("Can't send notification email, " "but the message was:\n%s" % text_content) @domain_admin_required def set_published_snapshot(request, domain, snapshot_name=''): domain = request.project snapshots = domain.snapshots() if request.method == 'POST': if snapshot_name != '': published_snapshot = Domain.get_by_name(snapshot_name) _publish_snapshot(request, domain, published_snapshot=published_snapshot) else: _publish_snapshot(request, domain) return redirect('domain_snapshot_settings', domain.name) class ProBonoMixin(): page_title = ugettext_lazy("Pro-Bono Application") is_submitted = False url_name = None @property def requesting_domain(self): raise NotImplementedError @property @memoized def pro_bono_form(self): if self.request.method == 'POST': return ProBonoForm(self.use_domain_field, self.request.POST) return 
ProBonoForm(self.use_domain_field) @property def page_context(self): return { 'pro_bono_form': self.pro_bono_form, 'is_submitted': self.is_submitted, } @property def page_url(self): return self.url_name def post(self, request, *args, **kwargs): if self.pro_bono_form.is_valid(): self.pro_bono_form.process_submission(domain=self.requesting_domain) self.is_submitted = True return self.get(request, *args, **kwargs) class ProBonoStaticView(ProBonoMixin, BasePageView): template_name = 'domain/pro_bono/static.html' urlname = 'pro_bono_static' use_domain_field = True @property def requesting_domain(self): return self.pro_bono_form.cleaned_data['domain'] class ProBonoView(ProBonoMixin, DomainAccountingSettings): template_name = 'domain/pro_bono/domain.html' urlname = 'pro_bono' use_domain_field = False @property def requesting_domain(self): return self.domain @property def parent_pages(self): return [ { 'title': DomainSubscriptionView.page_title, 'url': reverse(DomainSubscriptionView.urlname, args=[self.domain]), } ] @property def section_url(self): return self.page_url class FeaturePreviewsView(BaseAdminProjectSettingsView): urlname = 'feature_previews' page_title = ugettext_lazy("Feature Previews") template_name = 'domain/admin/feature_previews.html' @memoized def features(self): features = [] for preview_name in dir(feature_previews): if not preview_name.startswith('__'): preview = getattr(feature_previews, preview_name) if isinstance(preview, feature_previews.FeaturePreview) and preview.has_privilege(self.request): features.append((preview, preview.enabled(self.domain))) return features def get_toggle(self, slug): if not slug in [f.slug for f, _ in self.features()]: raise Http404() try: return Toggle.get(slug) except ResourceNotFound: return Toggle(slug=slug) @property def page_context(self): return { 'features': self.features(), } def post(self, request, *args, **kwargs): for feature, enabled in self.features(): self.update_feature(feature, enabled, feature.slug in 
request.POST) return redirect('feature_previews', domain=self.domain) def update_feature(self, feature, current_state, new_state): if current_state != new_state: feature.set(self.domain, new_state, NAMESPACE_DOMAIN) if feature.save_fn is not None: feature.save_fn(self.domain, new_state) class FeatureFlagsView(BaseAdminProjectSettingsView): urlname = 'domain_feature_flags' page_title = ugettext_lazy("Feature Flags") template_name = 'domain/admin/feature_flags.html' @method_decorator(require_superuser) def dispatch(self, request, *args, **kwargs): return super(FeatureFlagsView, self).dispatch(request, *args, **kwargs) @memoized def enabled_flags(self): def _sort_key(toggle_enabled_tuple): return (not toggle_enabled_tuple[1], not toggle_enabled_tuple[2], toggle_enabled_tuple[0].label) return sorted( [(toggle, toggle.enabled(self.domain), toggle.enabled(self.request.couch_user.username)) for toggle in all_toggles()], key=_sort_key, ) @property def page_context(self): return { 'flags': self.enabled_flags(), } class TransferDomainView(BaseAdminProjectSettingsView): urlname = 'transfer_domain_view' page_title = ugettext_lazy("Transfer Project") template_name = 'domain/admin/transfer_domain.html' @property @memoized def active_transfer(self): return TransferDomainRequest.get_active_transfer(self.domain, self.request.user.username) @property @memoized def transfer_domain_form(self): return TransferDomainForm(self.domain, self.request.user.username, self.request.POST or None) def get(self, request, *args, **kwargs): if self.active_transfer: self.template_name = 'domain/admin/transfer_domain_pending.html' if request.GET.get('resend', None): self.active_transfer.send_transfer_request() messages.info(request, _(u"Resent transfer request for project '{domain}'").format(domain=self.domain)) return super(TransferDomainView, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): form = self.transfer_domain_form if form.is_valid(): # Initiate domain transfer 
transfer = form.save() transfer.send_transfer_request() return HttpResponseRedirect(self.page_url) context = self.get_context_data(**kwargs) return self.render_to_response(context) @property def page_context(self): if self.active_transfer: return {'transfer': self.active_transfer.as_dict()} else: return {'form': self.transfer_domain_form} @method_decorator(domain_admin_required) def dispatch(self, request, *args, **kwargs): if not TRANSFER_DOMAIN.enabled(request.domain): raise Http404() return super(TransferDomainView, self).dispatch(request, *args, **kwargs) class ActivateTransferDomainView(BasePageView): urlname = 'activate_transfer_domain' page_title = 'Activate Domain Transfer' template_name = 'domain/activate_transfer_domain.html' @property @memoized def active_transfer(self): return TransferDomainRequest.get_by_guid(self.guid) @property def page_context(self): if self.active_transfer: return {'transfer': self.active_transfer.as_dict()} else: return {} @property def page_url(self): return self.request.get_full_path() def get(self, request, guid, *args, **kwargs): self.guid = guid if (self.active_transfer and self.active_transfer.to_username != request.user.username and not request.user.is_superuser): return HttpResponseRedirect(reverse("no_permissions")) return super(ActivateTransferDomainView, self).get(request, *args, **kwargs) def post(self, request, guid, *args, **kwargs): self.guid = guid if not self.active_transfer: raise Http404() if self.active_transfer.to_username != request.user.username and not request.user.is_superuser: return HttpResponseRedirect(reverse("no_permissions")) self.active_transfer.transfer_domain(ip=get_ip(request)) messages.success(request, _(u"Successfully transferred ownership of project '{domain}'") .format(domain=self.active_transfer.domain)) return HttpResponseRedirect(reverse('dashboard_default', args=[self.active_transfer.domain])) @method_decorator(login_required) def dispatch(self, *args, **kwargs): return 
        return super(ActivateTransferDomainView, self).dispatch(*args, **kwargs)


class DeactivateTransferDomainView(View):
    # POST-only endpoint that declines/cancels a pending domain transfer
    # identified by guid.  Usable by either party of the transfer, or a
    # superuser.

    def post(self, request, guid, *args, **kwargs):
        transfer = TransferDomainRequest.get_by_guid(guid)
        if not transfer:
            # Unknown guid: bounce back to wherever the user came from.
            return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))

        if (transfer.to_username != request.user.username and
                transfer.from_username != request.user.username and
                not request.user.is_superuser):
            return HttpResponseRedirect(reverse("no_permissions"))

        # Deactivate rather than delete, preserving the request record.
        transfer.active = False
        transfer.save()

        referer = request.META.get('HTTP_REFERER', '/')

        # Do not want to send them back to the activate page
        if referer.endswith(reverse('activate_transfer_domain', args=[guid])):
            messages.info(request,
                          _(u"Declined ownership of project '{domain}'").format(domain=transfer.domain))
            return HttpResponseRedirect('/')
        else:
            return HttpResponseRedirect(referer)

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(DeactivateTransferDomainView, self).dispatch(*args, **kwargs)


# NOTE(review): mid-module imports — presumably placed here to avoid a
# circular import with corehq.apps.smsbillables; confirm before moving them
# to the top of the file.
from corehq.apps.smsbillables.forms import PublicSMSRateCalculatorForm
from corehq.apps.smsbillables.async_handlers import PublicSMSRatesAsyncHandler


class PublicSMSRatesView(BasePageView, AsyncHandlerMixin):
    # Public SMS rate calculator — no domain or login required.
    urlname = 'public_sms_rates_view'
    page_title = ugettext_lazy("SMS Rate Calculator")
    template_name = 'domain/admin/global_sms_rates.html'
    async_handlers = [PublicSMSRatesAsyncHandler]

    @property
    def page_url(self):
        return reverse(self.urlname)

    @property
    def page_context(self):
        return {
            'rate_calc_form': PublicSMSRateCalculatorForm()
        }

    def post(self, request, *args, **kwargs):
        # async_response serves the AJAX rate lookups; a plain POST just
        # re-renders the page.
        return self.async_response or self.get(request, *args, **kwargs)


class SMSRatesView(BaseAdminProjectSettingsView, AsyncHandlerMixin):
    # Domain-scoped SMS rate calculator, shown under project settings.
    urlname = 'domain_sms_rates_view'
    page_title = ugettext_lazy("SMS Rate Calculator")
    template_name = 'domain/admin/sms_rates.html'
    async_handlers = [
        SMSRatesAsyncHandler,
        SMSRatesSelect2AsyncHandler,
    ]

    @property
    @memoized
    def rate_calc_form(self):
        if
self.request.method == 'POST': return SMSRateCalculatorForm(self.domain, self.request.POST) return SMSRateCalculatorForm(self.domain) @property def page_context(self): return { 'rate_calc_form': self.rate_calc_form, } def post(self, request, *args, **kwargs): if self.async_response is not None: return self.async_response return self.get(request, *args, **kwargs) @require_POST @domain_admin_required def org_request(request, domain): org_name = request.POST.get("org_name", None) org = Organization.get_by_name(org_name) if org: org_request = OrgRequest.get_requests(org_name, domain=domain, user_id=request.couch_user.get_id) if not org_request: org_request = OrgRequest(organization=org_name, domain=domain, requested_by=request.couch_user.get_id, requested_on=datetime.datetime.utcnow()) org_request.save() _send_request_notification_email(request, org, domain) messages.success(request, "Your request was submitted. The admin of organization %s can now choose to manage the project %s" % (org_name, domain)) else: messages.error(request, "You've already submitted a request to this organization") else: messages.error(request, "The organization '%s' does not exist" % org_name) return HttpResponseRedirect(reverse('domain_org_settings', args=[domain])) def _send_request_notification_email(request, org, dom): params = {"org": org, "dom": dom, "requestee": request.couch_user, "url_base": get_site_domain()} text_content = render_to_string( "domain/email/org_request_notification.txt", params) html_content = render_to_string( "domain/email/org_request_notification.html", params) recipients = [member.email for member in org.get_members() if member.is_org_admin(org.name)] subject = "New request to add a project to your organization! 
-- CommcareHQ" try: for recipient in recipients: send_html_email_async.delay(subject, recipient, html_content, text_content=text_content, email_from=settings.DEFAULT_FROM_EMAIL) except Exception: logging.warning("Can't send notification email, " "but the message was:\n%s" % text_content) class BaseCardView(DomainAccountingSettings): @property def payment_method(self): payment_method, __ = StripePaymentMethod.objects.get_or_create( web_user=self.request.user.username, method_type=PaymentMethodType.STRIPE, ) return payment_method def _generic_error(self): error = ("Something went wrong while processing your request. " "We're working quickly to resolve the issue. " "Please try again in a few hours.") return json_response({'error': error}, status_code=500) def _stripe_error(self, e): body = e.json_body err = body['error'] return json_response({'error': err['message'], 'cards': self.payment_method.all_cards_serialized(self.account)}, status_code=502) class CardView(BaseCardView): """View for dealing with a single Credit Card""" url_name = "card_view" def post(self, request, domain, card_token): try: card = self.payment_method.get_card(card_token) if request.POST.get("is_autopay") == 'true': self.payment_method.set_autopay(card, self.account) elif request.POST.get("is_autopay") == 'false': self.payment_method.unset_autopay(card, self.account) except self.payment_method.STRIPE_GENERIC_ERROR as e: return self._stripe_error(e) except Exception as e: return self._generic_error() return json_response({'cards': self.payment_method.all_cards_serialized(self.account)}) def delete(self, request, domain, card_token): try: self.payment_method.remove_card(card_token) except self.payment_method.STRIPE_GENERIC_ERROR as e: return self._stripe_error(e) return json_response({'cards': self.payment_method.all_cards_serialized(self.account)}) class CardsView(BaseCardView): """View for dealing Credit Cards""" url_name = "cards_view" def get(self, request, domain): return 
json_response({'cards': self.payment_method.all_cards_serialized(self.account)}) def post(self, request, domain): stripe_token = request.POST.get('token') autopay = request.POST.get('autopay') == 'true' try: self.payment_method.create_card(stripe_token, self.account, autopay) except self.payment_method.STRIPE_GENERIC_ERROR as e: return self._stripe_error(e) except Exception as e: return self._generic_error() return json_response({'cards': self.payment_method.all_cards_serialized(self.account)})
import copy import datetime from decimal import Decimal import logging import uuid import json import cStringIO from couchdbkit import ResourceNotFound import dateutil from django.core.paginator import Paginator from django.views.generic import View from django.db.models import Sum from django.conf import settings from django.template.loader import render_to_string from django.utils.decorators import method_decorator from django.utils.safestring import mark_safe from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect, HttpResponse, Http404 from django.shortcuts import redirect, render from django.contrib import messages from django.views.decorators.http import require_POST from PIL import Image from django.utils.translation import ugettext as _, ugettext_noop, ugettext_lazy from corehq.const import USER_DATE_FORMAT from custom.dhis2.forms import Dhis2SettingsForm from custom.dhis2.models import Dhis2Settings from casexml.apps.case.mock import CaseBlock from casexml.apps.case.xml import V2 from corehq.apps.accounting.async_handlers import Select2BillingInfoHandler from corehq.apps.accounting.invoicing import DomainWireInvoiceFactory from corehq.apps.accounting.decorators import ( requires_privilege_with_fallback, ) from corehq.apps.hqwebapp.tasks import send_mail_async from corehq.apps.accounting.exceptions import ( NewSubscriptionError, PaymentRequestError, ) from corehq.apps.accounting.payment_handlers import ( BulkStripePaymentHandler, CreditStripePaymentHandler, InvoiceStripePaymentHandler, ) from corehq.apps.accounting.subscription_changes import DomainDowngradeStatusHandler from corehq.apps.accounting.forms import EnterprisePlanContactForm from corehq.apps.accounting.utils import ( get_change_status, get_privileges, fmt_dollar_amount, quantize_accounting_decimal, get_customer_cards, ) from corehq.apps.hqwebapp.async_handler import AsyncHandlerMixin from corehq.apps.smsbillables.async_handlers import SMSRatesAsyncHandler, 
SMSRatesSelect2AsyncHandler from corehq.apps.smsbillables.forms import SMSRateCalculatorForm from corehq.apps.users.models import DomainInvitation from corehq.apps.fixtures.models import FixtureDataType from corehq.toggles import NAMESPACE_DOMAIN, all_toggles, CAN_EDIT_EULA, TRANSFER_DOMAIN from corehq.util.context_processors import get_domain_type from dimagi.utils.couch.resource_conflict import retry_resource from corehq import privileges, feature_previews from django_prbac.utils import has_privilege from corehq.apps.accounting.models import ( Subscription, CreditLine, SoftwareProductType, SubscriptionType, DefaultProductPlan, SoftwarePlanEdition, BillingAccount, BillingAccountType, Invoice, BillingRecord, InvoicePdf, PaymentMethodType, PaymentMethod, EntryPoint, WireInvoice, SoftwarePlanVisibility, FeatureType, StripePaymentMethod, ) from corehq.apps.accounting.usage import FeatureUsageCalculator from corehq.apps.accounting.user_text import ( get_feature_name, PricingTable, DESC_BY_EDITION, get_feature_recurring_interval, ) from corehq.apps.hqwebapp.models import ProjectSettingsTab from corehq.apps import receiverwrapper from corehq.apps.domain.calculations import CALCS, CALC_FNS, CALC_ORDER, dom_calc from corehq.apps.domain.decorators import ( domain_admin_required, login_required, require_superuser, login_and_domain_required ) from corehq.apps.domain.forms import ( DomainGlobalSettingsForm, DomainMetadataForm, SnapshotSettingsForm, SnapshotApplicationForm, DomainInternalForm, PrivacySecurityForm, ConfirmNewSubscriptionForm, ProBonoForm, EditBillingAccountInfoForm, ConfirmSubscriptionRenewalForm, SnapshotFixtureForm, TransferDomainForm, SelectSubscriptionTypeForm, INTERNAL_SUBSCRIPTION_MANAGEMENT_FORMS) from corehq.apps.domain.models import Domain, LICENSES, TransferDomainRequest from corehq.apps.domain.utils import normalize_domain_name from corehq.apps.hqwebapp.views import BaseSectionPageView, BasePageView, CRUDPaginatedViewMixin from corehq.apps.orgs.models 
import Organization, OrgRequest, Team from corehq.apps.domain.forms import ProjectSettingsForm from dimagi.utils.decorators.memoized import memoized from dimagi.utils.web import get_ip, json_response, get_site_domain from corehq.apps.users.decorators import require_can_edit_web_users from corehq.apps.receiverwrapper.forms import GenericRepeaterForm, FormRepeaterForm from corehq.apps.receiverwrapper.models import FormRepeater, CaseRepeater, ShortFormRepeater, AppStructureRepeater, \ RepeatRecord from dimagi.utils.post import simple_post from toggle.models import Toggle from corehq.apps.hqwebapp.tasks import send_html_email_async accounting_logger = logging.getLogger('accounting') PAYMENT_ERROR_MESSAGES = { 400: ugettext_lazy('Your request was not formatted properly.'), 403: ugettext_lazy('Forbidden.'), 404: ugettext_lazy('Page not found.'), 500: ugettext_lazy("There was an error processing your request." " We're working quickly to fix the issue. Please try again shortly."), } # Domain not required here - we could be selecting it for the first time. 
# See notes domain.decorators
# about why we need this custom login_required decorator
@login_required
def select(request, domain_select_template='domain/select.html', do_not_redirect=False):
    # Domain picker shown after login.  If the user has a remembered
    # last-visited domain (and no pending invitations), skip the picker and
    # jump straight to that domain's dashboard.
    domains_for_user = Domain.active_for_user(request.user)
    if not domains_for_user:
        # User belongs to no domain yet: send them to registration.
        return redirect('registration_domain', domain_type=get_domain_type(None, request))

    email = request.couch_user.get_email()
    open_invitations = [e for e in DomainInvitation.by_email(email) if not e.is_expired]

    additional_context = {
        'domains_for_user': domains_for_user,
        'open_invitations': open_invitations,
    }

    last_visited_domain = request.session.get('last_visited_domain')
    if open_invitations \
            or do_not_redirect \
            or not last_visited_domain:
        return render(request, domain_select_template, additional_context)
    else:
        domain = Domain.get_by_name(last_visited_domain)
        if domain and domain.is_active:
            # mirrors logic in login_and_domain_required
            if (
                request.couch_user.is_member_of(domain)
                or domain.is_public
                or (request.user.is_superuser and not domain.restrict_superusers)
                or domain.is_snapshot
            ):
                try:
                    # Local import — NOTE(review): presumably avoids a circular
                    # import with the dashboard app; confirm before hoisting.
                    from corehq.apps.dashboard.views import dashboard_default
                    return dashboard_default(request, last_visited_domain)
                except Http404:
                    pass

        # Remembered domain is gone or inaccessible: forget it and show the picker.
        del request.session['last_visited_domain']

        return render(request, domain_select_template, additional_context)


@require_superuser
def incomplete_email(request,
                     incomplete_email_template='domain/incomplete_email.html'):
    # Superuser report of domains with incomplete signup information.
    from corehq.apps.domain.tasks import (
        incomplete_self_started_domains,
        incomplete_domains_to_email,
    )
    context = {
        'self_started': incomplete_self_started_domains,
        'dimagi_owned': incomplete_domains_to_email,
    }
    return render(request, incomplete_email_template, context)


class DomainViewMixin(object):
    """
    Paving the way for a world of entirely class-based views.
    Let's do this, guys. :-)

    Set strict_domain_fetching to True in subclasses to bypass the cache.
""" strict_domain_fetching = False @property @memoized def domain(self): domain = self.args[0] if len(self.args) > 0 else self.kwargs.get('domain', "") return normalize_domain_name(domain) @property @memoized def domain_object(self): domain = Domain.get_by_name(self.domain, strict=self.strict_domain_fetching) if not domain: raise Http404() return domain class LoginAndDomainMixin(object): @method_decorator(login_and_domain_required) def dispatch(self, *args, **kwargs): return super(LoginAndDomainMixin, self).dispatch(*args, **kwargs) class SubscriptionUpgradeRequiredView(LoginAndDomainMixin, BasePageView, DomainViewMixin): page_title = ugettext_lazy("Upgrade Required") template_name = "domain/insufficient_privilege_notification.html" @property def page_url(self): return self.request.get_full_path @property def page_name(self): return _("Sorry, you do not have access to %(feature_name)s") % { 'feature_name': self.feature_name, } @property def is_domain_admin(self): if not hasattr(self.request, 'couch_user'): return False return self.request.couch_user.is_domain_admin(self.domain) @property def page_context(self): return { 'domain': self.domain, 'feature_name': self.feature_name, 'plan_name': self.required_plan_name, 'change_subscription_url': reverse(SelectPlanView.urlname, args=[self.domain]), 'is_domain_admin': self.is_domain_admin, } @property def missing_privilege(self): return self.args[1] @property def feature_name(self): return privileges.Titles.get_name_from_privilege(self.missing_privilege) @property def required_plan_name(self): return DefaultProductPlan.get_lowest_edition_by_domain( self.domain_object, [self.missing_privilege] ) def get(self, request, *args, **kwargs): self.request = request self.args = args return super(SubscriptionUpgradeRequiredView, self).get( request, *args, **kwargs ) class BaseDomainView(LoginAndDomainMixin, BaseSectionPageView, DomainViewMixin): @property def main_context(self): main_context = super(BaseDomainView, 
self).main_context main_context.update({ 'domain': self.domain, }) return main_context @property @memoized def page_url(self): if self.urlname: return reverse(self.urlname, args=[self.domain]) class BaseProjectSettingsView(BaseDomainView): section_name = ugettext_lazy("Project Settings") template_name = "settings/base_template.html" @property def main_context(self): main_context = super(BaseProjectSettingsView, self).main_context main_context.update({ 'active_tab': ProjectSettingsTab( self.request, self.urlname, domain=self.domain, couch_user=self.request.couch_user, project=self.request.project ), 'is_project_settings': True, }) return main_context @property @memoized def section_url(self): return reverse(EditMyProjectSettingsView.urlname, args=[self.domain]) class DefaultProjectSettingsView(BaseDomainView): urlname = 'domain_settings_default' def get(self, request, *args, **kwargs): if request.couch_user.is_domain_admin(self.domain): return HttpResponseRedirect(reverse(EditBasicProjectInfoView.urlname, args=[self.domain])) return HttpResponseRedirect(reverse(EditMyProjectSettingsView.urlname, args=[self.domain])) class BaseAdminProjectSettingsView(BaseProjectSettingsView): """ The base class for all project settings views that require administrative access. """ @method_decorator(domain_admin_required) def dispatch(self, request, *args, **kwargs): return super(BaseProjectSettingsView, self).dispatch(request, *args, **kwargs) class BaseEditProjectInfoView(BaseAdminProjectSettingsView): """ The base class for all the edit project information views. """ strict_domain_fetching = True @property def autocomplete_fields(self): return [] @property def main_context(self): context = super(BaseEditProjectInfoView, self).main_context context.update({ 'autocomplete_fields': self.autocomplete_fields, 'commtrack_enabled': self.domain_object.commtrack_enabled, # ideally the template gets access to the domain doc through # some other means. 
otherwise it has to be supplied to every view reachable in that sidebar (every # view whose template extends users_base.html); mike says he's refactoring all of this imminently, so # i will not worry about it until he is done 'call_center_enabled': self.domain_object.call_center_config.enabled, 'cloudcare_releases': self.domain_object.cloudcare_releases, }) return context class EditBasicProjectInfoView(BaseEditProjectInfoView): template_name = 'domain/admin/info_basic.html' urlname = 'domain_basic_info' page_title = ugettext_lazy("Basic") @property def can_user_see_meta(self): return self.request.couch_user.is_previewer() @property def can_use_custom_logo(self): return has_privilege(self.request, privileges.CUSTOM_BRANDING) @property @memoized def basic_info_form(self): initial = { 'hr_name': self.domain_object.hr_name or self.domain_object.name, 'default_timezone': self.domain_object.default_timezone, 'case_sharing': json.dumps(self.domain_object.case_sharing), 'call_center_enabled': self.domain_object.call_center_config.enabled, 'call_center_type': self.initial_call_center_type, 'call_center_case_owner': self.initial_call_center_case_owner, 'call_center_case_type': self.domain_object.call_center_config.case_type, 'commtrack_enabled': self.domain_object.commtrack_enabled, } if self.request.method == 'POST': if self.can_user_see_meta: return DomainMetadataForm( self.request.POST, self.request.FILES, user=self.request.couch_user, domain=self.domain_object.name, can_use_custom_logo=self.can_use_custom_logo, ) return DomainGlobalSettingsForm( self.request.POST, self.request.FILES, domain=self.domain_object.name, can_use_custom_logo=self.can_use_custom_logo ) if self.can_user_see_meta: initial.update({ 'is_test': self.domain_object.is_test, 'cloudcare_releases': self.domain_object.cloudcare_releases, }) return DomainMetadataForm( can_use_custom_logo=self.can_use_custom_logo, user=self.request.couch_user, domain=self.domain_object.name, initial=initial ) return 
DomainGlobalSettingsForm( initial=initial, domain=self.domain_object.name, can_use_custom_logo=self.can_use_custom_logo ) @property @memoized def initial_call_center_case_owner(self): config = self.domain_object.call_center_config if config.use_user_location_as_owner: return DomainGlobalSettingsForm.USE_LOCATIONS_CHOICE return self.domain_object.call_center_config.case_owner_id @property @memoized def initial_call_center_type(self): if self.domain_object.call_center_config.use_fixtures: return DomainGlobalSettingsForm.CASES_AND_FIXTURES_CHOICE return DomainGlobalSettingsForm.CASES_ONLY_CHOICE @property def page_context(self): return { 'basic_info_form': self.basic_info_form, } def post(self, request, *args, **kwargs): if self.basic_info_form.is_valid(): if self.basic_info_form.save(request, self.domain_object): messages.success(request, _("Project settings saved!")) else: messages.error(request, _("There seems to have been an error saving your settings. Please try again!")) return self.get(request, *args, **kwargs) class EditMyProjectSettingsView(BaseProjectSettingsView): template_name = 'domain/admin/my_project_settings.html' urlname = 'my_project_settings' page_title = ugettext_lazy("My Timezone") @property @memoized def my_project_settings_form(self): initial = { 'global_timezone': self.domain_object.default_timezone } if self.domain_membership: initial.update({ 'override_global_tz': self.domain_membership.override_global_tz, 'user_timezone': (self.domain_membership.timezone if self.domain_membership.override_global_tz else self.domain_object.default_timezone), }) else: initial.update({ 'override_global_tz': False, 'user_timezone': initial["global_timezone"], }) if self.request.method == 'POST': return ProjectSettingsForm(self.request.POST, initial=initial) return ProjectSettingsForm(initial=initial) @property @memoized def domain_membership(self): return self.request.couch_user.get_domain_membership(self.domain) @property def page_context(self): return { 
'my_project_settings_form': self.my_project_settings_form, 'override_global_tz': self.domain_membership.override_global_tz if self.domain_membership else False, 'no_domain_membership': not self.domain_membership, } def post(self, request, *args, **kwargs): if self.my_project_settings_form.is_valid(): self.my_project_settings_form.save(self.request.couch_user, self.domain) messages.success(request, _("Your project settings have been saved!")) return self.get(request, *args, **kwargs) class EditDhis2SettingsView(BaseProjectSettingsView): template_name = 'domain/admin/dhis2_settings.html' urlname = 'dhis2_settings' page_title = ugettext_lazy("DHIS2 API settings") @property @memoized def dhis2_settings_form(self): settings_ = Dhis2Settings.for_domain(self.domain_object.name) initial = settings_.dhis2 if settings_ else {'enabled': False} if self.request.method == 'POST': return Dhis2SettingsForm(self.request.POST, initial=initial) return Dhis2SettingsForm(initial=initial) @property def page_context(self): return { 'dhis2_settings_form': self.dhis2_settings_form, } def post(self, request, *args, **kwargs): if self.dhis2_settings_form.is_valid(): if self.dhis2_settings_form.save(self.domain_object): messages.success(request, _('DHIS2 API settings successfully updated')) else: messages.error(request, _('There seems to have been an error. 
Please try again.')) return self.get(request, *args, **kwargs) @require_POST @require_can_edit_web_users def drop_repeater(request, domain, repeater_id): rep = FormRepeater.get(repeater_id) rep.retire() messages.success(request, "Form forwarding stopped!") return HttpResponseRedirect(reverse(DomainForwardingOptionsView.urlname, args=[domain])) @require_POST @require_can_edit_web_users def test_repeater(request, domain): url = request.POST["url"] repeater_type = request.POST['repeater_type'] format = request.POST['format'] form = GenericRepeaterForm( {"url": url, "format": format}, domain=domain, repeater_class=receiverwrapper.models.repeater_types[repeater_type] ) if form.is_valid(): url = form.cleaned_data["url"] # now we fake a post def _stub(repeater_type): if 'case' in repeater_type.lower(): return CaseBlock( case_id='test-case-%s' % uuid.uuid4().hex, create=True, case_type='test', case_name='test case', ).as_string() else: return "<?xml version='1.0' ?><data id='test'><TestString>Test post from CommCareHQ on %s</TestString></data>" % \ (datetime.datetime.utcnow()) fake_post = _stub(repeater_type) try: resp = simple_post(fake_post, url) if 200 <= resp.status < 300: return HttpResponse(json.dumps({"success": True, "response": resp.read(), "status": resp.status})) else: return HttpResponse(json.dumps({"success": False, "response": resp.read(), "status": resp.status})) except Exception, e: errors = str(e) return HttpResponse(json.dumps({"success": False, "response": errors})) else: return HttpResponse(json.dumps({"success": False, "response": "Please enter a valid url."})) def autocomplete_fields(request, field): prefix = request.GET.get('prefix', '') results = Domain.field_by_prefix(field, prefix) return HttpResponse(json.dumps(results)) def logo(request, domain): logo = Domain.get_by_name(domain).get_custom_logo() if logo is None: raise Http404() return HttpResponse(logo[0], content_type=logo[1]) class DomainAccountingSettings(BaseAdminProjectSettingsView): 
@method_decorator(login_and_domain_required) def dispatch(self, request, *args, **kwargs): return super(DomainAccountingSettings, self).dispatch(request, *args, **kwargs) @property @memoized def product(self): return SoftwareProductType.get_type_by_domain(self.domain_object) @property @memoized def account(self): return BillingAccount.get_account_by_domain(self.domain) @property def current_subscription(self): return Subscription.get_subscribed_plan_by_domain(self.domain_object)[1] class DomainSubscriptionView(DomainAccountingSettings): urlname = 'domain_subscription_view' template_name = 'domain/current_subscription.html' page_title = ugettext_lazy("Current Subscription") @property def can_purchase_credits(self): return self.request.couch_user.is_domain_admin(self.domain) @property def plan(self): plan_version, subscription = Subscription.get_subscribed_plan_by_domain(self.domain_object) date_end = None next_subscription = { 'exists': False, 'can_renew': False, 'name': None, 'price': None, } cards = None general_credits = None if subscription: cards = get_customer_cards(self.account, self.request.user.username, self.domain) date_end = (subscription.date_end.strftime(USER_DATE_FORMAT) if subscription.date_end is not None else "--") if subscription.date_end is not None: if subscription.is_renewed: next_product = self.get_product_summary(subscription.next_subscription.plan_version, self.account, subscription) next_subscription.update({ 'exists': True, 'date_start': subscription.next_subscription.date_start.strftime(USER_DATE_FORMAT), 'name': subscription.next_subscription.plan_version.plan.name, 'price': next_product['monthly_fee'], }) else: days_left = (subscription.date_end - datetime.date.today()).days next_subscription.update({ 'can_renew': days_left <= 30, 'renew_url': reverse(SubscriptionRenewalView.urlname, args=[self.domain]), }) general_credits = CreditLine.get_credits_by_subscription_and_features(subscription) elif self.account is not None: general_credits 
= CreditLine.get_credits_for_account(self.account) if general_credits: general_credits = self._fmt_credit(self._credit_grand_total(general_credits)) info = { 'products': [self.get_product_summary(plan_version, self.account, subscription)], 'features': self.get_feature_summary(plan_version, self.account, subscription), 'general_credit': general_credits, 'css_class': "label-plan %s" % plan_version.plan.edition.lower(), 'do_not_invoice': subscription.do_not_invoice if subscription is not None else False, 'is_trial': subscription.is_trial if subscription is not None else False, 'date_start': (subscription.date_start.strftime(USER_DATE_FORMAT) if subscription is not None else None), 'date_end': date_end, 'cards': cards, 'next_subscription': next_subscription, } info.update(plan_version.user_facing_description) return info def _fmt_credit(self, credit_amount=None): if credit_amount is None: return { 'amount': "--", } return { 'amount': fmt_dollar_amount(credit_amount), 'is_visible': credit_amount != Decimal('0.0'), } def _credit_grand_total(self, credit_lines): return sum([c.balance for c in credit_lines]) if credit_lines else Decimal('0.00') def get_product_summary(self, plan_version, account, subscription): product_rates = plan_version.product_rates.all() if len(product_rates) > 1: # Models and UI are both written to support multiple products, # but for now, each subscription can only have one product. accounting_logger.error( "[BILLING] " "There seem to be multiple ACTIVE NEXT subscriptions for the subscriber %s. " "Odd, right? The latest one by date_created was used, but consider this an issue." 
% self.account ) product_rate = product_rates[0] product_info = { 'name': product_rate.product.product_type, 'monthly_fee': _("USD %s /month") % product_rate.monthly_fee, 'credit': None, 'type': product_rate.product.product_type, } credit_lines = None if subscription is not None: credit_lines = CreditLine.get_credits_by_subscription_and_features( subscription, product_type=product_rate.product.product_type ) elif account is not None: credit_lines = CreditLine.get_credits_for_account( account, product_type=product_rate.product.product_type ) if credit_lines: product_info['credit'] = self._fmt_credit(self._credit_grand_total(credit_lines)) return product_info def get_feature_summary(self, plan_version, account, subscription): feature_summary = [] for feature_rate in plan_version.feature_rates.all(): usage = FeatureUsageCalculator(feature_rate, self.domain).get_usage() feature_info = { 'name': get_feature_name(feature_rate.feature.feature_type, self.product), 'usage': usage, 'remaining': ( feature_rate.monthly_limit - usage if feature_rate.monthly_limit != -1 else _('Unlimited') ), 'credit': self._fmt_credit(), 'type': feature_rate.feature.feature_type, 'recurring_interval': get_feature_recurring_interval(feature_rate.feature.feature_type), } credit_lines = None if subscription is not None: credit_lines = CreditLine.get_credits_by_subscription_and_features( subscription, feature_type=feature_rate.feature.feature_type ) elif account is not None: credit_lines = CreditLine.get_credits_for_account( account, feature_type=feature_rate.feature.feature_type) if credit_lines: feature_info['credit'] = self._fmt_credit(self._credit_grand_total(credit_lines)) feature_summary.append(feature_info) return feature_summary @property def page_context(self): return { 'plan': self.plan, 'change_plan_url': reverse(SelectPlanView.urlname, args=[self.domain]), 'can_purchase_credits': self.can_purchase_credits, 'credit_card_url': reverse(CreditsStripePaymentView.urlname, args=[self.domain]), 
'wire_url': reverse(CreditsWireInvoiceView.urlname, args=[self.domain]), 'stripe_public_key': settings.STRIPE_PUBLIC_KEY, 'payment_error_messages': PAYMENT_ERROR_MESSAGES, 'sms_rate_calc_url': reverse(SMSRatesView.urlname, args=[self.domain]), 'user_email': self.request.couch_user.username, } class EditExistingBillingAccountView(DomainAccountingSettings, AsyncHandlerMixin): template_name = 'domain/update_billing_contact_info.html' urlname = 'domain_update_billing_info' page_title = ugettext_lazy("Billing Information") async_handlers = [ Select2BillingInfoHandler, ] @property @memoized def billing_info_form(self): if self.request.method == 'POST': return EditBillingAccountInfoForm( self.account, self.domain, self.request.couch_user.username, data=self.request.POST ) return EditBillingAccountInfoForm(self.account, self.domain, self.request.couch_user.username) def dispatch(self, request, *args, **kwargs): if self.account is None: raise Http404() return super(EditExistingBillingAccountView, self).dispatch(request, *args, **kwargs) @property def page_context(self): return { 'billing_account_info_form': self.billing_info_form, 'cards': self._get_cards(), 'stripe_public_key': settings.STRIPE_PUBLIC_KEY, 'card_base_url': reverse(CardsView.url_name, args=[self.domain]), } def _get_cards(self): user = self.request.user.username payment_method, new_payment_method = StripePaymentMethod.objects.get_or_create( web_user=user, method_type=PaymentMethodType.STRIPE, ) return payment_method.all_cards_serialized(self.account) def post(self, request, *args, **kwargs): if self.async_response is not None: return self.async_response if self.billing_info_form.is_valid(): is_saved = self.billing_info_form.save() if not is_saved: messages.error( request, _("It appears that there was an issue updating your contact information. " "We've been notified of the issue. 
Please try submitting again, and if the problem " "persists, please try in a few hours.")) else: messages.success( request, _("Billing contact information was successfully updated.") ) return HttpResponseRedirect(reverse(EditExistingBillingAccountView.urlname, args=[self.domain])) return self.get(request, *args, **kwargs) class DomainBillingStatementsView(DomainAccountingSettings, CRUDPaginatedViewMixin): template_name = 'domain/billing_statements.html' urlname = 'domain_billing_statements' page_title = ugettext_lazy("Billing Statements") limit_text = ugettext_lazy("statements per page") empty_notification = ugettext_lazy("No Billing Statements match the current criteria.") loading_message = ugettext_lazy("Loading statements...") @property def parameters(self): return self.request.POST if self.request.method == 'POST' else self.request.GET @property def stripe_cards(self): return get_customer_cards(self.account, self.request.user.username, self.domain) @property def show_hidden(self): if not self.request.user.is_superuser: return False return bool(self.request.POST.get('additionalData[show_hidden]')) @property def show_unpaid(self): try: return json.loads(self.request.POST.get('additionalData[show_unpaid]')) except TypeError: return False @property def invoices(self): invoices = Invoice.objects.filter(subscription__subscriber__domain=self.domain) if not self.show_hidden: invoices = invoices.filter(is_hidden=False) if self.show_unpaid: invoices = invoices.filter(date_paid__exact=None) return invoices.order_by('-date_start', '-date_end') @property def total(self): return self.paginated_invoices.count @property @memoized def paginated_invoices(self): return Paginator(self.invoices, self.limit) @property def total_balance(self): """ Returns the total balance of unpaid, unhidden invoices. Doesn't take into account the view settings on the page. 
""" invoices = (Invoice.objects .filter(subscription__subscriber__domain=self.domain) .filter(date_paid__exact=None) .filter(is_hidden=False)) return invoices.aggregate( total_balance=Sum('balance') ).get('total_balance') or 0.00 @property def column_names(self): return [ _("Statement No."), _("Plan"), _("Billing Period"), _("Date Due"), _("Payment Status"), _("PDF"), ] @property def page_context(self): pagination_context = self.pagination_context pagination_context.update({ 'stripe_public_key': settings.STRIPE_PUBLIC_KEY, 'payment_error_messages': PAYMENT_ERROR_MESSAGES, 'process_invoice_payment_url': reverse( InvoiceStripePaymentView.urlname, args=[self.domain], ), 'process_bulk_payment_url': reverse( BulkStripePaymentView.urlname, args=[self.domain], ), 'process_wire_invoice_url': reverse( WireInvoiceView.urlname, args=[self.domain], ), 'stripe_cards': self.stripe_cards, 'total_balance': self.total_balance, }) return pagination_context @property def can_pay_invoices(self): return self.request.couch_user.is_domain_admin(self.domain) @property def paginated_list(self): for invoice in self.paginated_invoices.page(self.page).object_list: try: last_billing_record = BillingRecord.objects.filter( invoice=invoice ).latest('date_created') if invoice.is_paid: payment_status = (_("Paid on %s.") % invoice.date_paid.strftime(USER_DATE_FORMAT)) payment_class = "label label-inverse" else: payment_status = _("Not Paid") payment_class = "label label-important" date_due = ( (invoice.date_due.strftime(USER_DATE_FORMAT) if not invoice.is_paid else _("Already Paid")) if invoice.date_due else _("None") ) yield { 'itemData': { 'id': invoice.id, 'invoice_number': invoice.invoice_number, 'start': invoice.date_start.strftime(USER_DATE_FORMAT), 'end': invoice.date_end.strftime(USER_DATE_FORMAT), 'plan': invoice.subscription.plan_version.user_facing_description, 'payment_status': payment_status, 'payment_class': payment_class, 'date_due': date_due, 'pdfUrl': reverse( 
BillingStatementPdfView.urlname, args=[self.domain, last_billing_record.pdf_data_id] ), 'canMakePayment': (not invoice.is_paid and self.can_pay_invoices), 'balance': "%s" % quantize_accounting_decimal(invoice.balance), }, 'template': 'statement-row-template', } except BillingRecord.DoesNotExist: logging.error( "An invoice was generated for %(invoice_id)d " "(domain: %(domain)s), but no billing record!" % { 'invoice_id': invoice.id, 'domain': self.domain, }) def refresh_item(self, item_id): pass def post(self, *args, **kwargs): return self.paginate_crud_response def dispatch(self, request, *args, **kwargs): if self.account is None: raise Http404() return super(DomainBillingStatementsView, self).dispatch(request, *args, **kwargs) class BaseStripePaymentView(DomainAccountingSettings): http_method_names = ['post'] @property def account(self): raise NotImplementedError("you must impmement the property account") @property @memoized def domain_admin(self): if self.request.couch_user.is_domain_admin(self.domain): return self.request.couch_user.username else: raise PaymentRequestError( "The logged in user was not a domain admin." ) def get_or_create_payment_method(self): return StripePaymentMethod.objects.get_or_create( web_user=self.domain_admin, method_type=PaymentMethodType.STRIPE, )[0] def get_payment_handler(self): """Returns a StripePaymentHandler object """ raise NotImplementedError("You must impmenent get_payment_handler()") def post(self, request, *args, **kwargs): try: payment_handler = self.get_payment_handler() response = payment_handler.process_request(request) except PaymentRequestError as e: accounting_logger.error( "[BILLING] Failed to process Stripe Payment due to bad " "request for domain %(domain)s user %(web_user)s: " "%(error)s" % { 'domain': self.domain, 'web_user': self.request.user.username, 'error': e, } ) response = { 'error': { 'message': _( "There was an issue processing your payment. No " "charges were made. 
We're looking into the issue " "as quickly as possible. Sorry for the inconvenience." ) } } return json_response(response) class CreditsStripePaymentView(BaseStripePaymentView): urlname = 'domain_credits_payment' @property @memoized def account(self): return BillingAccount.get_or_create_account_by_domain( self.domain, created_by=self.request.user.username, account_type=BillingAccountType.USER_CREATED, entry_point=EntryPoint.SELF_STARTED, )[0] def get_payment_handler(self): return CreditStripePaymentHandler( self.get_or_create_payment_method(), self.domain, self.account, subscription=Subscription.get_subscribed_plan_by_domain(self.domain_object)[1], post_data=self.request.POST.copy(), ) class CreditsWireInvoiceView(DomainAccountingSettings): http_method_names = ['post'] urlname = 'domain_wire_payment' @method_decorator(login_and_domain_required) def dispatch(self, request, *args, **kwargs): return super(CreditsWireInvoiceView, self).dispatch(request, *args, **kwargs) def post(self, request, *args, **kwargs): emails = request.POST.get('emails', []).split() amount = Decimal(request.POST.get('amount', 0)) wire_invoice_factory = DomainWireInvoiceFactory(request.domain, contact_emails=emails) try: wire_invoice_factory.create_wire_credits_invoice(self._get_items(request), amount) except Exception as e: return json_response({'error': {'message': str(e)}}) return json_response({'success': True}) def _get_items(self, request): product_type = SoftwareProductType.get_type_by_domain(Domain.get_by_name(self.domain)) features = [{'type': get_feature_name(feature_type[0], product_type), 'amount': Decimal(request.POST.get(feature_type[0], 0))} for feature_type in FeatureType.CHOICES if Decimal(request.POST.get(feature_type[0], 0)) > 0] products = [{'type': pt[0], 'amount': Decimal(request.POST.get(pt[0], 0))} for pt in SoftwareProductType.CHOICES if Decimal(request.POST.get(pt[0], 0)) > 0] return products + features class InvoiceStripePaymentView(BaseStripePaymentView): urlname = 
'domain_invoice_payment' @property @memoized def invoice(self): try: invoice_id = self.request.POST['invoice_id'] except IndexError: raise PaymentRequestError("invoice_id is required") try: return Invoice.objects.get(pk=invoice_id) except Invoice.DoesNotExist: raise PaymentRequestError( "Could not find a matching invoice for invoice_id '%s'" % invoice_id ) @property def account(self): return self.invoice.subscription.account def get_payment_handler(self): return InvoiceStripePaymentHandler( self.get_or_create_payment_method(), self.domain, self.invoice ) class BulkStripePaymentView(BaseStripePaymentView): urlname = 'domain_bulk_payment' @property def account(self): return BillingAccount.get_account_by_domain(self.domain) def get_payment_handler(self): return BulkStripePaymentHandler( self.get_or_create_payment_method(), self.domain ) class WireInvoiceView(View): http_method_names = ['post'] urlname = 'domain_wire_invoice' @method_decorator(login_and_domain_required) @method_decorator(domain_admin_required) def dispatch(self, request, *args, **kwargs): return super(WireInvoiceView, self).dispatch(request, *args, **kwargs) def post(self, request, *args, **kwargs): emails = request.POST.get('emails', []).split() balance = Decimal(request.POST.get('customPaymentAmount', 0)) wire_invoice_factory = DomainWireInvoiceFactory(request.domain, contact_emails=emails) try: wire_invoice_factory.create_wire_invoice(balance) except Exception, e: return json_response({'error': {'message', e}}) return json_response({'success': True}) class BillingStatementPdfView(View): urlname = 'domain_billing_statement_download' @method_decorator(login_and_domain_required) @method_decorator(domain_admin_required) def dispatch(self, request, *args, **kwargs): return super(BillingStatementPdfView, self).dispatch(request, *args, **kwargs) def get(self, request, *args, **kwargs): domain = args[0] statement_id = kwargs.get('statement_id') if statement_id is None or domain is None: raise Http404() try: 
invoice_pdf = InvoicePdf.get(statement_id) except ResourceNotFound: raise Http404() try: if invoice_pdf.is_wire: invoice = WireInvoice.objects.get( pk=invoice_pdf.invoice_id, domain=domain ) else: invoice = Invoice.objects.get( pk=invoice_pdf.invoice_id, subscription__subscriber__domain=domain ) except (Invoice.DoesNotExist, WireInvoice.DoesNotExist): raise Http404() if invoice.is_wire: edition = 'Bulk' else: edition = DESC_BY_EDITION[invoice.subscription.plan_version.plan.edition]['name'] filename = "%(pdf_id)s_%(domain)s_%(edition)s_%(filename)s" % { 'pdf_id': invoice_pdf._id, 'domain': domain, 'edition': edition, 'filename': invoice_pdf.get_filename(invoice), } try: data = invoice_pdf.get_data(invoice) response = HttpResponse(data, content_type='application/pdf') response['Content-Disposition'] = 'inline;filename="%s' % filename except Exception as e: logging.error('[Billing] Fetching invoice PDF failed: %s' % e) return HttpResponse(_("Could not obtain billing statement. " "An issue has been submitted.")) return response class InternalSubscriptionManagementView(BaseAdminProjectSettingsView): template_name = 'domain/internal_subscription_management.html' urlname = 'internal_subscription_mgmt' page_title = ugettext_lazy("Dimagi Internal Subscription Management") form_classes = INTERNAL_SUBSCRIPTION_MANAGEMENT_FORMS @method_decorator(require_superuser) def get(self, request, *args, **kwargs): return super(InternalSubscriptionManagementView, self).get(request, *args, **kwargs) @method_decorator(require_superuser) def post(self, request, *args, **kwargs): form = self.get_post_form if form.is_valid(): try: form.process_subscription_management() return HttpResponseRedirect(reverse(DomainSubscriptionView.urlname, args=[self.domain])) except NewSubscriptionError as e: messages.error(self.request, e.message) return self.get(request, *args, **kwargs) @property def page_context(self): return { 'plan_name': Subscription.get_subscribed_plan_by_domain(self.domain)[0], 
'select_subscription_type_form': self.select_subscription_type_form, 'subscription_management_forms': self.slug_to_form.values(), 'today': datetime.date.today(), } @property def get_post_form(self): return self.slug_to_form[self.request.POST.get('slug')] @property @memoized def slug_to_form(self): def create_form(form_class): if self.request.method == 'POST' and form_class.slug == self.request.POST.get('slug'): return form_class(self.domain, self.request.couch_user.username, self.request.POST) return form_class(self.domain, self.request.couch_user.username) return {form_class.slug: create_form(form_class) for form_class in self.form_classes} @property @memoized def select_subscription_type_form(self): if self.request.method == 'POST': for form_slug in self.slug_to_form: if form_slug in self.request.POST: return SelectSubscriptionTypeForm({ 'subscription_type': form_slug, }) subscription_type = None subscription = Subscription.get_subscribed_plan_by_domain(self.domain_object)[1] if subscription is None: subscription_type = None else: plan = subscription.plan_version.plan if subscription.service_type == SubscriptionType.CONTRACTED: subscription_type = "contracted_partner" elif plan.edition == SoftwarePlanEdition.ENTERPRISE: subscription_type = "dimagi_only_enterprise" elif (plan.edition == SoftwarePlanEdition.ADVANCED and plan.visibility == SoftwarePlanVisibility.TRIAL_INTERNAL): subscription_type = "advanced_extended_trial" return SelectSubscriptionTypeForm({'subscription_type': subscription_type}) class SelectPlanView(DomainAccountingSettings): template_name = 'domain/select_plan.html' urlname = 'domain_select_plan' page_title = ugettext_lazy("Change Plan") step_title = ugettext_lazy("Select Plan") edition = None lead_text = ugettext_lazy("Please select a plan below that fits your organization's needs.") @property def edition_name(self): if self.edition: return DESC_BY_EDITION[self.edition]['name'] @property def is_non_ops_superuser(self): if not 
self.request.couch_user.is_superuser: return False return not has_privilege(self.request, privileges.ACCOUNTING_ADMIN) @property def parent_pages(self): return [ { 'title': DomainSubscriptionView.page_title, 'url': reverse(DomainSubscriptionView.urlname, args=[self.domain]), } ] @property def steps(self): edition_name = u" (%s)" % self.edition_name if self.edition_name else "" return [ { 'title': _(u"1. Select a Plan%(edition_name)s") % { "edition_name": edition_name }, 'url': reverse(SelectPlanView.urlname, args=[self.domain]), } ] @property def main_context(self): context = super(SelectPlanView, self).main_context context.update({ 'steps': self.steps, 'step_title': self.step_title, 'lead_text': self.lead_text, }) return context @property def page_context(self): return { 'pricing_table': PricingTable.get_table_by_product(self.product, domain=self.domain), 'current_edition': (self.current_subscription.plan_version.plan.edition.lower() if self.current_subscription is not None and not self.current_subscription.is_trial else ""), 'is_non_ops_superuser': self.is_non_ops_superuser, } class EditPrivacySecurityView(BaseAdminProjectSettingsView): template_name = "domain/admin/project_privacy.html" urlname = "privacy_info" page_title = ugettext_lazy("Privacy and Security") @property @memoized def privacy_form(self): initial = { "secure_submissions": self.domain_object.secure_submissions, "restrict_superusers": self.domain_object.restrict_superusers, "allow_domain_requests": self.domain_object.allow_domain_requests, } if self.request.method == 'POST': return PrivacySecurityForm(self.request.POST, initial=initial) return PrivacySecurityForm(initial=initial) @property def page_context(self): return { 'privacy_form': self.privacy_form } def post(self, request, *args, **kwargs): if self.privacy_form.is_valid(): self.privacy_form.save(self.domain_object) messages.success(request, _("Your project settings have been saved!")) return self.get(request, *args, **kwargs) class 
SelectedEnterprisePlanView(SelectPlanView): template_name = 'domain/selected_enterprise_plan.html' urlname = 'enterprise_request_quote' step_title = ugettext_lazy("Contact Dimagi") edition = SoftwarePlanEdition.ENTERPRISE @property def steps(self): last_steps = super(SelectedEnterprisePlanView, self).steps last_steps.append({ 'title': _("2. Contact Dimagi"), 'url': reverse(SelectedEnterprisePlanView.urlname, args=[self.domain]), }) return last_steps @property @memoized def is_not_redirect(self): return not 'plan_edition' in self.request.POST @property @memoized def enterprise_contact_form(self): if self.request.method == 'POST' and self.is_not_redirect: return EnterprisePlanContactForm(self.domain, self.request.couch_user, data=self.request.POST) return EnterprisePlanContactForm(self.domain, self.request.couch_user) @property def page_context(self): return { 'enterprise_contact_form': self.enterprise_contact_form, } def post(self, request, *args, **kwargs): if self.is_not_redirect and self.enterprise_contact_form.is_valid(): self.enterprise_contact_form.send_message() messages.success(request, _("Your request was sent to Dimagi. " "We will try our best to follow up in a timely manner.")) return HttpResponseRedirect(reverse(DomainSubscriptionView.urlname, args=[self.domain])) return self.get(request, *args, **kwargs) class ConfirmSelectedPlanView(SelectPlanView): template_name = 'domain/confirm_plan.html' urlname = 'confirm_selected_plan' step_title = ugettext_lazy("Confirm Plan") @property def steps(self): last_steps = super(ConfirmSelectedPlanView, self).steps last_steps.append({ 'title': _("2. 
Confirm Plan"), 'url': reverse(SelectPlanView.urlname, args=[self.domain]), }) return last_steps @property @memoized def edition(self): edition = self.request.POST.get('plan_edition').title() if edition not in [e[0] for e in SoftwarePlanEdition.CHOICES]: raise Http404() return edition @property @memoized def selected_plan_version(self): return DefaultProductPlan.get_default_plan_by_domain(self.domain, self.edition).plan.get_version() @property def downgrade_messages(self): current_plan_version, subscription = Subscription.get_subscribed_plan_by_domain(self.domain_object) if subscription is None: current_plan_version = None downgrades = get_change_status(current_plan_version, self.selected_plan_version)[1] downgrade_handler = DomainDowngradeStatusHandler( self.domain_object, self.selected_plan_version, downgrades, web_user=self.request.user.username ) return downgrade_handler.get_response() @property def page_context(self): return { 'downgrade_messages': self.downgrade_messages, 'current_plan': (self.current_subscription.plan_version.user_facing_description if self.current_subscription is not None else None), 'show_community_notice': (self.edition == SoftwarePlanEdition.COMMUNITY and self.current_subscription is None), } @property def main_context(self): context = super(ConfirmSelectedPlanView, self).main_context context.update({ 'plan': self.selected_plan_version.user_facing_description, }) return context def get(self, request, *args, **kwargs): return HttpResponseRedirect(reverse(SelectPlanView.urlname, args=[self.domain])) def post(self, request, *args, **kwargs): if self.edition == SoftwarePlanEdition.ENTERPRISE and not self.request.couch_user.is_superuser: return HttpResponseRedirect(reverse(SelectedEnterprisePlanView.urlname, args=[self.domain])) return super(ConfirmSelectedPlanView, self).get(request, *args, **kwargs) class ConfirmBillingAccountInfoView(ConfirmSelectedPlanView, AsyncHandlerMixin): template_name = 'domain/confirm_billing_info.html' urlname = 
'confirm_billing_account_info' step_title = ugettext_lazy("Confirm Billing Information") is_new = False async_handlers = [ Select2BillingInfoHandler, ] @property def steps(self): last_steps = super(ConfirmBillingAccountInfoView, self).steps last_steps.append({ 'title': _("3. Confirm Billing Account"), 'url': reverse(ConfirmBillingAccountInfoView.urlname, args=[self.domain]), }) return last_steps @property @memoized def account(self): if self.current_subscription: return self.current_subscription.account account, self.is_new = BillingAccount.get_or_create_account_by_domain( self.domain, created_by=self.request.couch_user.username, account_type=BillingAccountType.USER_CREATED, entry_point=EntryPoint.SELF_STARTED, ) return account @property def payment_method(self): user = self.request.user.username payment_method, __ = StripePaymentMethod.objects.get_or_create( web_user=user, method_type=PaymentMethodType.STRIPE, ) return payment_method @property @memoized def is_form_post(self): return 'company_name' in self.request.POST @property @memoized def billing_account_info_form(self): initial = None if self.edition == SoftwarePlanEdition.ENTERPRISE and self.request.couch_user.is_superuser: initial = { 'company_name': "Dimagi", 'first_line': "585 Massachusetts Ave", 'second_line': "Suite 4", 'city': "Cambridge", 'state_province_region': "MA", 'postal_code': "02139", 'country': "US", } if self.request.method == 'POST' and self.is_form_post: return ConfirmNewSubscriptionForm( self.account, self.domain, self.request.couch_user.username, self.selected_plan_version, self.current_subscription, data=self.request.POST, initial=initial ) return ConfirmNewSubscriptionForm(self.account, self.domain, self.request.couch_user.username, self.selected_plan_version, self.current_subscription, initial=initial) @property def page_context(self): return { 'billing_account_info_form': self.billing_account_info_form, 'stripe_public_key': settings.STRIPE_PUBLIC_KEY, 'cards': 
self.payment_method.all_cards_serialized(self.account) } def post(self, request, *args, **kwargs): if self.async_response is not None: return self.async_response if self.edition == SoftwarePlanEdition.ENTERPRISE and not self.request.couch_user.is_superuser: return HttpResponseRedirect(reverse(SelectedEnterprisePlanView.urlname, args=[self.domain])) if self.is_form_post and self.billing_account_info_form.is_valid(): is_saved = self.billing_account_info_form.save() software_plan_name = DESC_BY_EDITION[self.selected_plan_version.plan.edition]['name'].encode('utf-8') if not is_saved: messages.error( request, _("It appears there was an issue subscribing your project to the %s Software Plan. You " "may try resubmitting, but if that doesn't work, rest assured someone will be " "contacting you shortly.") % software_plan_name) else: messages.success( request, _("Your project has been successfully subscribed to the %s Software Plan." % software_plan_name) ) return HttpResponseRedirect(reverse(DomainSubscriptionView.urlname, args=[self.domain])) return super(ConfirmBillingAccountInfoView, self).post(request, *args, **kwargs) class SubscriptionMixin(object): @property @memoized def subscription(self): subscription = Subscription.get_subscribed_plan_by_domain(self.domain_object)[1] if subscription is None: raise Http404 if subscription.is_renewed: raise Http404 return subscription class SubscriptionRenewalView(SelectPlanView, SubscriptionMixin): urlname = "domain_subscription_renewal" page_title = ugettext_lazy("Renew Plan") step_title = ugettext_lazy("Renew or Change Plan") @property def lead_text(self): return ugettext_lazy("Based on your current usage we recommend you use the <strong>{plan}</strong> plan" .format(plan=self.current_subscription.plan_version.plan.edition)) @property def main_context(self): context = super(SubscriptionRenewalView, self).main_context context.update({'is_renewal': True}) return context @property def page_context(self): context = 
super(SubscriptionRenewalView, self).page_context current_privs = get_privileges(self.subscription.plan_version) plan = DefaultProductPlan.get_lowest_edition_by_domain( self.domain, current_privs, return_plan=False, ).lower() context['current_edition'] = (plan if self.current_subscription is not None and not self.current_subscription.is_trial else "") return context class ConfirmSubscriptionRenewalView(DomainAccountingSettings, AsyncHandlerMixin, SubscriptionMixin): template_name = 'domain/confirm_subscription_renewal.html' urlname = 'domain_subscription_renewal_confirmation' page_title = ugettext_lazy("Renew Plan") async_handlers = [ Select2BillingInfoHandler, ] @property @memoized def next_plan_version(self): new_edition = self.request.POST.get('plan_edition').title() plan_version = DefaultProductPlan.get_default_plan_by_domain(self.domain, new_edition) if plan_version is None: logging.error("[BILLING] Could not find a matching renewable plan " "for %(domain)s, subscription number %(sub_pk)s." 
% { 'domain': self.domain, 'sub_pk': self.subscription.pk }) raise Http404 return plan_version @property @memoized def confirm_form(self): if self.request.method == 'POST' and "from_plan_page" not in self.request.POST: return ConfirmSubscriptionRenewalForm( self.account, self.domain, self.request.couch_user.username, self.subscription, self.next_plan_version, data=self.request.POST, ) return ConfirmSubscriptionRenewalForm( self.account, self.domain, self.request.couch_user.username, self.subscription, self.next_plan_version, ) @property def page_context(self): return { 'subscription': self.subscription, 'plan': self.subscription.plan_version.user_facing_description, 'confirm_form': self.confirm_form, 'next_plan': self.next_plan_version.user_facing_description, } def post(self, request, *args, **kwargs): if self.async_response is not None: return self.async_response if self.confirm_form.is_valid(): is_saved = self.confirm_form.save() if not is_saved: messages.error( request, _( "There was an issue renewing your subscription. We " "have been notified of the issue. Please try " "submitting again, and if the problem persists, " "please try in a few hours." 
) ) else: messages.success( request, _("Your subscription was successfully renewed!") ) return HttpResponseRedirect( reverse(DomainSubscriptionView.urlname, args=[self.domain]) ) return self.get(request, *args, **kwargs) class ExchangeSnapshotsView(BaseAdminProjectSettingsView): template_name = 'domain/snapshot_settings.html' urlname = 'domain_snapshot_settings' page_title = ugettext_lazy("CommCare Exchange") @property def page_context(self): return { 'project': self.domain_object, 'snapshots': list(self.domain_object.snapshots()), 'published_snapshot': self.domain_object.published_snapshot(), } class CreateNewExchangeSnapshotView(BaseAdminProjectSettingsView): template_name = 'domain/create_snapshot.html' urlname = 'domain_create_snapshot' page_title = ugettext_lazy("Publish New Version") strict_domain_fetching = True @property def parent_pages(self): return [{ 'title': ExchangeSnapshotsView.page_title, 'url': reverse(ExchangeSnapshotsView.urlname, args=[self.domain]), }] @property def page_context(self): context = { 'form': self.snapshot_settings_form, 'app_forms': self.app_forms, 'fixture_forms': self.fixture_forms, 'can_publish_as_org': self.can_publish_as_org, 'autocomplete_fields': ('project_type', 'phone_model', 'user_type', 'city', 'countries', 'region'), } if self.published_snapshot: context.update({ 'published_as_org': self.published_snapshot.publisher == 'organization', 'author': self.published_snapshot.author, }) elif self.request.method == 'POST': context.update({ 'published_as_org': self.request.POST.get('publisher', '') == 'organization', 'author': self.request.POST.get('author', '') }) return context @property def can_publish_as_org(self): return (self.domain_object.get_organization() and self.request.couch_user.is_org_admin(self.domain_object.get_organization().name)) @property @memoized def snapshots(self): return list(self.domain_object.snapshots()) @property @memoized def published_snapshot(self): return self.snapshots[0] if self.snapshots else 
self.domain_object @property @memoized def published_apps(self): published_apps = {} if self.published_snapshot: for app in self.published_snapshot.full_applications(): base_app_id = app.copy_of if self.domain_object == self.published_snapshot else app.copied_from.copy_of if base_app_id: published_apps[base_app_id] = app return published_apps @property def app_forms(self): app_forms = [] for app in self.domain_object.applications(): if self.request.method == 'POST': app_forms.append((app, SnapshotApplicationForm(self.request.POST, prefix=app.id))) elif self.published_snapshot and app.copy_of in self.published_apps: original = self.published_apps[app.copy_of] app_forms.append((app, SnapshotApplicationForm(initial={ 'publish': True, 'name': original.name, 'description': original.description, 'deployment_date': original.deployment_date, 'user_type': original.user_type, 'attribution_notes': original.attribution_notes, 'phone_model': original.phone_model, }, prefix=app.id))) else: app_forms.append((app, SnapshotApplicationForm( initial={ 'publish': (self.published_snapshot is None or self.published_snapshot == self.domain_object) }, prefix=app.id))) return app_forms @property @memoized def published_fixtures(self): return [f.copy_from for f in FixtureDataType.by_domain(self.published_snapshot._id)] @property def fixture_forms(self): fixture_forms = [] for fixture in FixtureDataType.by_domain(self.domain_object.name): fixture.id = fixture._id if self.request.method == 'POST': fixture_forms.append((fixture, SnapshotFixtureForm(self.request.POST, prefix=fixture._id))) else: fixture_forms.append((fixture, SnapshotFixtureForm( initial={ 'publish': (self.published_snapshot == self.domain_object or fixture._id in self.published_fixtures) }, prefix=fixture._id))) return fixture_forms @property @memoized def snapshot_settings_form(self): if self.request.method == 'POST': form = SnapshotSettingsForm(self.request.POST, self.request.FILES, domain=self.domain_object, 
is_superuser=self.request.user.is_superuser) return form proj = self.published_snapshot if self.published_snapshot else self.domain_object initial = { 'case_sharing': json.dumps(proj.case_sharing), 'publish_on_submit': True, 'share_multimedia': self.published_snapshot.multimedia_included if self.published_snapshot else True, } init_attribs = ['default_timezone', 'project_type', 'license'] if self.published_snapshot: init_attribs.extend(['title', 'description', 'short_description']) if self.published_snapshot.yt_id: initial['video'] = 'http://www.youtube.com/watch?v=%s' % self.published_snapshot.yt_id for attr in init_attribs: initial[attr] = getattr(proj, attr) return SnapshotSettingsForm(initial=initial, domain=self.domain_object, is_superuser=self.request.user.is_superuser) @property @memoized def has_published_apps(self): for app in self.domain_object.applications(): if self.request.POST.get("%s-publish" % app.id, False): return True messages.error(self.request, _("Cannot publish a project without applications to CommCare Exchange")) return False @property def has_signed_eula(self): eula_signed = self.request.couch_user.is_eula_signed() if not eula_signed: messages.error(self.request, _("You must agree to our eula to publish a project to Exchange")) return eula_signed @property def has_valid_form(self): is_valid = self.snapshot_settings_form.is_valid() if not is_valid: messages.error(self.request, _("There are some problems with your form. 
" "Please address these issues and try again.")) return is_valid def post(self, request, *args, **kwargs): if self.has_published_apps and self.has_signed_eula and self.has_valid_form: new_license = request.POST['license'] if request.POST.get('share_multimedia', False): app_ids = self.snapshot_settings_form._get_apps_to_publish() media = self.domain_object.all_media(from_apps=app_ids) for m_file in media: if self.domain not in m_file.shared_by: m_file.shared_by.append(self.domain) # set the license of every multimedia file that doesn't yet have a license set if not m_file.license: m_file.update_or_add_license(self.domain, type=new_license, should_save=False) m_file.save() if not request.POST.get('share_reminders', False): share_reminders = False else: share_reminders = True copy_by_id = set() for k in request.POST.keys(): if k.endswith("-publish"): copy_by_id.add(k[:-len("-publish")]) old = self.domain_object.published_snapshot() new_domain = self.domain_object.save_snapshot( share_reminders=share_reminders, copy_by_id=copy_by_id) new_domain.license = new_license new_domain.description = request.POST['description'] new_domain.short_description = request.POST['short_description'] new_domain.project_type = request.POST['project_type'] new_domain.title = request.POST['title'] new_domain.multimedia_included = request.POST.get('share_multimedia', '') == 'on' new_domain.publisher = request.POST.get('publisher', None) or 'user' if request.POST.get('video'): new_domain.yt_id = self.snapshot_settings_form.cleaned_data['video'] new_domain.author = request.POST.get('author', None) new_domain.is_approved = False new_domain.is_starter_app = request.POST.get('is_starter_app', '') == 'on' publish_on_submit = request.POST.get('publish_on_submit', "no") == "yes" image = self.snapshot_settings_form.cleaned_data['image'] if image: new_domain.image_path = image.name new_domain.image_type = image.content_type elif request.POST.get('old_image', False): new_domain.image_path = 
old.image_path new_domain.image_type = old.image_type new_domain.save() documentation_file = self.snapshot_settings_form.cleaned_data['documentation_file'] if documentation_file: new_domain.documentation_file_path = documentation_file.name new_domain.documentation_file_type = documentation_file.content_type elif request.POST.get('old_documentation_file', False): new_domain.documentation_file_path = old.documentation_file_path new_domain.documentation_file_type = old.documentation_file_type new_domain.save() if publish_on_submit: _publish_snapshot(request, self.domain_object, published_snapshot=new_domain) else: new_domain.published = False new_domain.save() if image: im = Image.open(image) out = cStringIO.StringIO() im.thumbnail((200, 200), Image.ANTIALIAS) im.save(out, new_domain.image_type.split('/')[-1]) new_domain.put_attachment(content=out.getvalue(), name=image.name) elif request.POST.get('old_image', False): new_domain.put_attachment(content=old.fetch_attachment(old.image_path), name=new_domain.image_path) if documentation_file: new_domain.put_attachment(content=documentation_file, name=documentation_file.name) elif request.POST.get('old_documentation_file', False): new_domain.put_attachment(content=old.fetch_attachment(old.documentation_file_path), name=new_domain.documentation_file_path) for application in new_domain.full_applications(): original_id = application.copied_from._id name_field = "%s-name" % original_id if name_field not in request.POST: continue application.name = request.POST[name_field] application.description = request.POST["%s-description" % original_id] date_picked = request.POST["%s-deployment_date" % original_id] try: date_picked = dateutil.parser.parse(date_picked) if date_picked.year > 2009: application.deployment_date = date_picked except Exception: pass application.phone_model = request.POST["%s-phone_model" % original_id] application.attribution_notes = request.POST["%s-attribution_notes" % original_id] application.user_type = 
request.POST["%s-user_type" % original_id] if not new_domain.multimedia_included: application.multimedia_map = {} application.save() for fixture in FixtureDataType.by_domain(new_domain.name): old_id = FixtureDataType.by_domain_tag(self.domain_object.name, fixture.tag).first()._id fixture.description = request.POST["%s-description" % old_id] fixture.save() if new_domain is None: messages.error(request, _("Version creation failed; please try again")) else: messages.success(request, (_("Created a new version of your app. This version will be posted to " "CommCare Exchange pending approval by admins.") if publish_on_submit else _("Created a new version of your app."))) return redirect(ExchangeSnapshotsView.urlname, self.domain) return self.get(request, *args, **kwargs) class ManageProjectMediaView(BaseAdminProjectSettingsView): urlname = 'domain_manage_multimedia' page_title = ugettext_lazy("Multimedia Sharing") template_name = 'domain/admin/media_manager.html' @property def project_media_data(self): return [{ 'license': m.license.type if m.license else 'public', 'shared': self.domain in m.shared_by, 'url': m.url(), 'm_id': m._id, 'tags': m.tags.get(self.domain, []), 'type': m.doc_type, } for m in self.request.project.all_media()] @property def page_context(self): return { 'media': self.project_media_data, 'licenses': LICENSES.items(), } @retry_resource(3) def post(self, request, *args, **kwargs): for m_file in request.project.all_media(): if '%s_tags' % m_file._id in request.POST: m_file.tags[self.domain] = request.POST.get('%s_tags' % m_file._id, '').split(' ') if self.domain not in m_file.shared_by and request.POST.get('%s_shared' % m_file._id, False): m_file.shared_by.append(self.domain) elif self.domain in m_file.shared_by and not request.POST.get('%s_shared' % m_file._id, False): m_file.shared_by.remove(self.domain) if '%s_license' % m_file._id in request.POST: m_file.update_or_add_license(self.domain, type=request.POST.get('%s_license' % m_file._id, 'public'), 
should_save=True) m_file.save() messages.success(request, _("Multimedia updated successfully!")) return self.get(request, *args, **kwargs) class RepeaterMixin(object): @property def friendly_repeater_names(self): return { 'FormRepeater': _("Forms"), 'CaseRepeater': _("Cases"), 'ShortFormRepeater': _("Form Stubs"), 'AppStructureRepeater': _("App Schema Changes"), } class DomainForwardingOptionsView(BaseAdminProjectSettingsView, RepeaterMixin): urlname = 'domain_forwarding' page_title = ugettext_lazy("Data Forwarding") template_name = 'domain/admin/domain_forwarding.html' @property def repeaters(self): available_repeaters = [ FormRepeater, CaseRepeater, ShortFormRepeater, AppStructureRepeater, ] return [(r.__name__, r.by_domain(self.domain), self.friendly_repeater_names[r.__name__]) for r in available_repeaters] @property def page_context(self): return { 'repeaters': self.repeaters, 'pending_record_count': RepeatRecord.count(self.domain), } class AddRepeaterView(BaseAdminProjectSettingsView, RepeaterMixin): urlname = 'add_repeater' page_title = ugettext_lazy("Forward Data") template_name = 'domain/admin/add_form_repeater.html' repeater_form_class = GenericRepeaterForm @property def page_url(self): return reverse(self.urlname, args=[self.domain, self.repeater_type]) @property def parent_pages(self): return [{ 'title': DomainForwardingOptionsView.page_title, 'url': reverse(DomainForwardingOptionsView.urlname, args=[self.domain]), }] @property def repeater_type(self): return self.kwargs['repeater_type'] @property def page_name(self): return "Forward %s" % self.friendly_repeater_names.get(self.repeater_type, "Data") @property @memoized def repeater_class(self): try: return receiverwrapper.models.repeater_types[self.repeater_type] except KeyError: raise Http404() @property @memoized def add_repeater_form(self): if self.request.method == 'POST': return self.repeater_form_class( self.request.POST, domain=self.domain, repeater_class=self.repeater_class ) return 
self.repeater_form_class( domain=self.domain, repeater_class=self.repeater_class ) @property def page_context(self): return { 'form': self.add_repeater_form, 'repeater_type': self.repeater_type, } def make_repeater(self): repeater = self.repeater_class( domain=self.domain, url=self.add_repeater_form.cleaned_data['url'], use_basic_auth=self.add_repeater_form.cleaned_data['use_basic_auth'], username=self.add_repeater_form.cleaned_data['username'], password=self.add_repeater_form.cleaned_data['password'], format=self.add_repeater_form.cleaned_data['format'] ) return repeater def post(self, request, *args, **kwargs): if self.add_repeater_form.is_valid(): repeater = self.make_repeater() repeater.save() messages.success(request, _("Forwarding set up to %s" % repeater.url)) return HttpResponseRedirect(reverse(DomainForwardingOptionsView.urlname, args=[self.domain])) return self.get(request, *args, **kwargs) class AddFormRepeaterView(AddRepeaterView): urlname = 'add_form_repeater' repeater_form_class = FormRepeaterForm @property def page_url(self): return reverse(self.urlname, args=[self.domain]) def make_repeater(self): repeater = super(AddFormRepeaterView, self).make_repeater() repeater.exclude_device_reports = self.add_repeater_form.cleaned_data['exclude_device_reports'] repeater.include_app_id_param = self.add_repeater_form.cleaned_data['include_app_id_param'] return repeater class OrgSettingsView(BaseAdminProjectSettingsView): template_name = 'domain/orgs_settings.html' urlname = 'domain_org_settings' page_title = ugettext_lazy("Organization") @method_decorator(requires_privilege_with_fallback(privileges.CROSS_PROJECT_REPORTS)) def dispatch(self, request, *args, **kwargs): return super(OrgSettingsView, self).dispatch(request, *args, **kwargs) @property def page_context(self): domain = self.domain_object org_users = [] teams = Team.get_by_domain(domain.name) for team in teams: for user in team.get_members(): user.team_id = team.get_id user.team = team.name 
org_users.append(user) for user in org_users: user.current_domain = domain.name all_orgs = Organization.get_all() return { "project": domain, 'domain': domain.name, "organization": Organization.get_by_name(getattr(domain, "organization", None)), "org_users": org_users, "all_orgs": all_orgs, } class BaseInternalDomainSettingsView(BaseProjectSettingsView): strict_domain_fetching = True @method_decorator(login_and_domain_required) @method_decorator(require_superuser) def dispatch(self, request, *args, **kwargs): return super(BaseInternalDomainSettingsView, self).dispatch(request, *args, **kwargs) @property def main_context(self): context = super(BaseInternalDomainSettingsView, self).main_context context.update({ 'project': self.domain_object, }) return context @property def page_name(self): return mark_safe("%s <small>Internal</small>" % self.page_title) class EditInternalDomainInfoView(BaseInternalDomainSettingsView): urlname = 'domain_internal_settings' page_title = ugettext_lazy("Project Information") template_name = 'domain/internal_settings.html' strict_domain_fetching = True @property def autocomplete_fields(self): return ['countries'] @property @memoized def internal_settings_form(self): can_edit_eula = CAN_EDIT_EULA.enabled(self.request.couch_user.username) if self.request.method == 'POST': return DomainInternalForm(can_edit_eula, self.request.POST) initial = { 'deployment_date': self.domain_object.deployment.date.date if self.domain_object.deployment.date else '', 'countries': self.domain_object.deployment.countries, 'is_test': self.domain_object.is_test, } internal_attrs = [ 'sf_contract_id', 'sf_account_id', 'services', 'initiative', 'self_started', 'area', 'sub_area', 'organization_name', 'notes', 'phone_model', 'commtrack_domain', 'business_unit', 'workshop_region', ] if can_edit_eula: internal_attrs += [ 'custom_eula', 'can_use_data', ] for attr in internal_attrs: val = getattr(self.domain_object.internal, attr) if isinstance(val, bool): val = 'true' if 
val else 'false' initial[attr] = val return DomainInternalForm(can_edit_eula, initial=initial) @property def page_context(self): return { 'project': self.domain_object, 'form': self.internal_settings_form, 'areas': dict([(a["name"], a["sub_areas"]) for a in settings.INTERNAL_DATA["area"]]), } def post(self, request, *args, **kwargs): if self.internal_settings_form.is_valid(): old_attrs = copy.copy(self.domain_object.internal) self.internal_settings_form.save(self.domain_object) eula_props_changed = (bool(old_attrs.custom_eula) != bool(self.domain_object.internal.custom_eula) or bool(old_attrs.can_use_data) != bool(self.domain_object.internal.can_use_data)) if eula_props_changed and settings.EULA_CHANGE_EMAIL: message = '\n'.join([ '{user} changed either the EULA or data sharing properties for domain {domain}.', '', 'The properties changed were:', '- Custom eula: {eula_old} --> {eula_new}', '- Can use data: {can_use_data_old} --> {can_use_data_new}' ]).format( user=self.request.couch_user.username, domain=self.domain, eula_old=old_attrs.custom_eula, eula_new=self.domain_object.internal.custom_eula, can_use_data_old=old_attrs.can_use_data, can_use_data_new=self.domain_object.internal.can_use_data, ) send_mail_async.delay( 'Custom EULA or data use flags changed for {}'.format(self.domain), message, settings.DEFAULT_FROM_EMAIL, [settings.EULA_CHANGE_EMAIL] ) messages.success(request, _("The internal information for project %s was successfully updated!") % self.domain) else: messages.error(request, _( "Your settings are not valid, see below for errors. 
Correct them and try again!")) return self.get(request, *args, **kwargs) class EditInternalCalculationsView(BaseInternalDomainSettingsView): urlname = 'domain_internal_calculations' page_title = ugettext_lazy("Calculated Properties") template_name = 'domain/internal_calculations.html' @property def page_context(self): return { 'calcs': CALCS, 'order': CALC_ORDER, } @login_and_domain_required @require_superuser def calculated_properties(request, domain): calc_tag = request.GET.get("calc_tag", '').split('--') extra_arg = calc_tag[1] if len(calc_tag) > 1 else '' calc_tag = calc_tag[0] if not calc_tag or calc_tag not in CALC_FNS.keys(): data = {"error": 'This tag does not exist'} else: data = {"value": dom_calc(calc_tag, domain, extra_arg)} return json_response(data) def _publish_snapshot(request, domain, published_snapshot=None): snapshots = domain.snapshots() for snapshot in snapshots: if snapshot.published: snapshot.published = False if not published_snapshot or snapshot.name != published_snapshot.name: snapshot.save() if published_snapshot: if published_snapshot.copied_from.name != domain.name: messages.error(request, "Invalid snapshot") return False # cda stuff. 
In order to publish a snapshot, a user must have agreed to this published_snapshot.cda.signed = True published_snapshot.cda.date = datetime.datetime.utcnow() published_snapshot.cda.type = 'Content Distribution Agreement' if request.couch_user: published_snapshot.cda.user_id = request.couch_user.get_id published_snapshot.cda.user_ip = get_ip(request) published_snapshot.published = True published_snapshot.save() _notification_email_on_publish(domain, published_snapshot, request.couch_user) return True def _notification_email_on_publish(domain, snapshot, published_by): params = {"domain": domain, "snapshot": snapshot, "published_by": published_by, "url_base": get_site_domain()} text_content = render_to_string( "domain/email/published_app_notification.txt", params) html_content = render_to_string( "domain/email/published_app_notification.html", params) recipients = settings.EXCHANGE_NOTIFICATION_RECIPIENTS subject = "New App on Exchange: %s" % snapshot.title try: for recipient in recipients: send_html_email_async.delay(subject, recipient, html_content, text_content=text_content, email_from=settings.DEFAULT_FROM_EMAIL) except Exception: logging.warning("Can't send notification email, " "but the message was:\n%s" % text_content) @domain_admin_required def set_published_snapshot(request, domain, snapshot_name=''): domain = request.project snapshots = domain.snapshots() if request.method == 'POST': if snapshot_name != '': published_snapshot = Domain.get_by_name(snapshot_name) _publish_snapshot(request, domain, published_snapshot=published_snapshot) else: _publish_snapshot(request, domain) return redirect('domain_snapshot_settings', domain.name) class ProBonoMixin(): page_title = ugettext_lazy("Pro-Bono Application") is_submitted = False url_name = None @property def requesting_domain(self): raise NotImplementedError @property @memoized def pro_bono_form(self): if self.request.method == 'POST': return ProBonoForm(self.use_domain_field, self.request.POST) return 
ProBonoForm(self.use_domain_field) @property def page_context(self): return { 'pro_bono_form': self.pro_bono_form, 'is_submitted': self.is_submitted, } @property def page_url(self): return self.url_name def post(self, request, *args, **kwargs): if self.pro_bono_form.is_valid(): self.pro_bono_form.process_submission(domain=self.requesting_domain) self.is_submitted = True return self.get(request, *args, **kwargs) class ProBonoStaticView(ProBonoMixin, BasePageView): template_name = 'domain/pro_bono/static.html' urlname = 'pro_bono_static' use_domain_field = True @property def requesting_domain(self): return self.pro_bono_form.cleaned_data['domain'] class ProBonoView(ProBonoMixin, DomainAccountingSettings): template_name = 'domain/pro_bono/domain.html' urlname = 'pro_bono' use_domain_field = False @property def requesting_domain(self): return self.domain @property def parent_pages(self): return [ { 'title': DomainSubscriptionView.page_title, 'url': reverse(DomainSubscriptionView.urlname, args=[self.domain]), } ] @property def section_url(self): return self.page_url class FeaturePreviewsView(BaseAdminProjectSettingsView): urlname = 'feature_previews' page_title = ugettext_lazy("Feature Previews") template_name = 'domain/admin/feature_previews.html' @memoized def features(self): features = [] for preview_name in dir(feature_previews): if not preview_name.startswith('__'): preview = getattr(feature_previews, preview_name) if isinstance(preview, feature_previews.FeaturePreview) and preview.has_privilege(self.request): features.append((preview, preview.enabled(self.domain))) return features def get_toggle(self, slug): if not slug in [f.slug for f, _ in self.features()]: raise Http404() try: return Toggle.get(slug) except ResourceNotFound: return Toggle(slug=slug) @property def page_context(self): return { 'features': self.features(), } def post(self, request, *args, **kwargs): for feature, enabled in self.features(): self.update_feature(feature, enabled, feature.slug in 
request.POST) return redirect('feature_previews', domain=self.domain) def update_feature(self, feature, current_state, new_state): if current_state != new_state: feature.set(self.domain, new_state, NAMESPACE_DOMAIN) if feature.save_fn is not None: feature.save_fn(self.domain, new_state) class FeatureFlagsView(BaseAdminProjectSettingsView): urlname = 'domain_feature_flags' page_title = ugettext_lazy("Feature Flags") template_name = 'domain/admin/feature_flags.html' @method_decorator(require_superuser) def dispatch(self, request, *args, **kwargs): return super(FeatureFlagsView, self).dispatch(request, *args, **kwargs) @memoized def enabled_flags(self): def _sort_key(toggle_enabled_tuple): return (not toggle_enabled_tuple[1], not toggle_enabled_tuple[2], toggle_enabled_tuple[0].label) return sorted( [(toggle, toggle.enabled(self.domain), toggle.enabled(self.request.couch_user.username)) for toggle in all_toggles()], key=_sort_key, ) @property def page_context(self): return { 'flags': self.enabled_flags(), } class TransferDomainView(BaseAdminProjectSettingsView): urlname = 'transfer_domain_view' page_title = ugettext_lazy("Transfer Project") template_name = 'domain/admin/transfer_domain.html' @property @memoized def active_transfer(self): return TransferDomainRequest.get_active_transfer(self.domain, self.request.user.username) @property @memoized def transfer_domain_form(self): return TransferDomainForm(self.domain, self.request.user.username, self.request.POST or None) def get(self, request, *args, **kwargs): if self.active_transfer: self.template_name = 'domain/admin/transfer_domain_pending.html' if request.GET.get('resend', None): self.active_transfer.send_transfer_request() messages.info(request, _(u"Resent transfer request for project '{domain}'").format(domain=self.domain)) return super(TransferDomainView, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): form = self.transfer_domain_form if form.is_valid(): # Initiate domain transfer 
transfer = form.save() transfer.send_transfer_request() return HttpResponseRedirect(self.page_url) context = self.get_context_data(**kwargs) return self.render_to_response(context) @property def page_context(self): if self.active_transfer: return {'transfer': self.active_transfer.as_dict()} else: return {'form': self.transfer_domain_form} @method_decorator(domain_admin_required) def dispatch(self, request, *args, **kwargs): if not TRANSFER_DOMAIN.enabled(request.domain): raise Http404() return super(TransferDomainView, self).dispatch(request, *args, **kwargs) class ActivateTransferDomainView(BasePageView): urlname = 'activate_transfer_domain' page_title = 'Activate Domain Transfer' template_name = 'domain/activate_transfer_domain.html' @property @memoized def active_transfer(self): return TransferDomainRequest.get_by_guid(self.guid) @property def page_context(self): if self.active_transfer: return {'transfer': self.active_transfer.as_dict()} else: return {} @property def page_url(self): return self.request.get_full_path() def get(self, request, guid, *args, **kwargs): self.guid = guid if (self.active_transfer and self.active_transfer.to_username != request.user.username and not request.user.is_superuser): return HttpResponseRedirect(reverse("no_permissions")) return super(ActivateTransferDomainView, self).get(request, *args, **kwargs) def post(self, request, guid, *args, **kwargs): self.guid = guid if not self.active_transfer: raise Http404() if self.active_transfer.to_username != request.user.username and not request.user.is_superuser: return HttpResponseRedirect(reverse("no_permissions")) self.active_transfer.transfer_domain(ip=get_ip(request)) messages.success(request, _(u"Successfully transferred ownership of project '{domain}'") .format(domain=self.active_transfer.domain)) return HttpResponseRedirect(reverse('dashboard_default', args=[self.active_transfer.domain])) @method_decorator(login_required) def dispatch(self, *args, **kwargs): return 
super(ActivateTransferDomainView, self).dispatch(*args, **kwargs) class DeactivateTransferDomainView(View): def post(self, request, guid, *args, **kwargs): transfer = TransferDomainRequest.get_by_guid(guid) if not transfer: return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/')) if (transfer.to_username != request.user.username and transfer.from_username != request.user.username and not request.user.is_superuser): return HttpResponseRedirect(reverse("no_permissions")) transfer.active = False transfer.save() referer = request.META.get('HTTP_REFERER', '/') # Do not want to send them back to the activate page if referer.endswith(reverse('activate_transfer_domain', args=[guid])): messages.info(request, _(u"Declined ownership of project '{domain}'").format(domain=transfer.domain)) return HttpResponseRedirect('/') else: return HttpResponseRedirect(referer) @method_decorator(login_required) def dispatch(self, *args, **kwargs): return super(DeactivateTransferDomainView, self).dispatch(*args, **kwargs) from corehq.apps.smsbillables.forms import PublicSMSRateCalculatorForm from corehq.apps.smsbillables.async_handlers import PublicSMSRatesAsyncHandler class PublicSMSRatesView(BasePageView, AsyncHandlerMixin): urlname = 'public_sms_rates_view' page_title = ugettext_lazy("SMS Rate Calculator") template_name = 'domain/admin/global_sms_rates.html' async_handlers = [PublicSMSRatesAsyncHandler] @property def page_url(self): return reverse(self.urlname) @property def page_context(self): return { 'rate_calc_form': PublicSMSRateCalculatorForm() } def post(self, request, *args, **kwargs): return self.async_response or self.get(request, *args, **kwargs) class SMSRatesView(BaseAdminProjectSettingsView, AsyncHandlerMixin): urlname = 'domain_sms_rates_view' page_title = ugettext_lazy("SMS Rate Calculator") template_name = 'domain/admin/sms_rates.html' async_handlers = [ SMSRatesAsyncHandler, SMSRatesSelect2AsyncHandler, ] @property @memoized def rate_calc_form(self): if 
self.request.method == 'POST': return SMSRateCalculatorForm(self.domain, self.request.POST) return SMSRateCalculatorForm(self.domain) @property def page_context(self): return { 'rate_calc_form': self.rate_calc_form, } def post(self, request, *args, **kwargs): if self.async_response is not None: return self.async_response return self.get(request, *args, **kwargs) @require_POST @domain_admin_required def org_request(request, domain): org_name = request.POST.get("org_name", None) org = Organization.get_by_name(org_name) if org: org_request = OrgRequest.get_requests(org_name, domain=domain, user_id=request.couch_user.get_id) if not org_request: org_request = OrgRequest(organization=org_name, domain=domain, requested_by=request.couch_user.get_id, requested_on=datetime.datetime.utcnow()) org_request.save() _send_request_notification_email(request, org, domain) messages.success(request, "Your request was submitted. The admin of organization %s can now choose to manage the project %s" % (org_name, domain)) else: messages.error(request, "You've already submitted a request to this organization") else: messages.error(request, "The organization '%s' does not exist" % org_name) return HttpResponseRedirect(reverse('domain_org_settings', args=[domain])) def _send_request_notification_email(request, org, dom): params = {"org": org, "dom": dom, "requestee": request.couch_user, "url_base": get_site_domain()} text_content = render_to_string( "domain/email/org_request_notification.txt", params) html_content = render_to_string( "domain/email/org_request_notification.html", params) recipients = [member.email for member in org.get_members() if member.is_org_admin(org.name)] subject = "New request to add a project to your organization! 
-- CommcareHQ" try: for recipient in recipients: send_html_email_async.delay(subject, recipient, html_content, text_content=text_content, email_from=settings.DEFAULT_FROM_EMAIL) except Exception: logging.warning("Can't send notification email, " "but the message was:\n%s" % text_content) class BaseCardView(DomainAccountingSettings): @property def payment_method(self): payment_method, __ = StripePaymentMethod.objects.get_or_create( web_user=self.request.user.username, method_type=PaymentMethodType.STRIPE, ) return payment_method def _generic_error(self): error = ("Something went wrong while processing your request. " "We're working quickly to resolve the issue. " "Please try again in a few hours.") return json_response({'error': error}, status_code=500) def _stripe_error(self, e): body = e.json_body err = body['error'] return json_response({'error': err['message'], 'cards': self.payment_method.all_cards_serialized(self.account)}, status_code=502) class CardView(BaseCardView): """View for dealing with a single Credit Card""" url_name = "card_view" def post(self, request, domain, card_token): try: card = self.payment_method.get_card(card_token) if request.POST.get("is_autopay") == 'true': self.payment_method.set_autopay(card, self.account) elif request.POST.get("is_autopay") == 'false': self.payment_method.unset_autopay(card, self.account) except self.payment_method.STRIPE_GENERIC_ERROR as e: return self._stripe_error(e) except Exception as e: return self._generic_error() return json_response({'cards': self.payment_method.all_cards_serialized(self.account)}) def delete(self, request, domain, card_token): try: self.payment_method.remove_card(card_token) except self.payment_method.STRIPE_GENERIC_ERROR as e: return self._stripe_error(e) return json_response({'cards': self.payment_method.all_cards_serialized(self.account)}) class CardsView(BaseCardView): """View for dealing Credit Cards""" url_name = "cards_view" def get(self, request, domain): return 
json_response({'cards': self.payment_method.all_cards_serialized(self.account)}) def post(self, request, domain): stripe_token = request.POST.get('token') autopay = request.POST.get('autopay') == 'true' try: self.payment_method.create_card(stripe_token, self.account, autopay) except self.payment_method.STRIPE_GENERIC_ERROR as e: return self._stripe_error(e) except Exception as e: return self._generic_error() return json_response({'cards': self.payment_method.all_cards_serialized(self.account)})
en
0.911647
# Domain not required here - we could be selecting it for the first time. See notes domain.decorators # about why we need this custom login_required decorator # mirrors logic in login_and_domain_required Paving the way for a world of entirely class-based views. Let's do this, guys. :-) Set strict_domain_fetching to True in subclasses to bypass the cache. The base class for all project settings views that require administrative access. The base class for all the edit project information views. # ideally the template gets access to the domain doc through # some other means. otherwise it has to be supplied to every view reachable in that sidebar (every # view whose template extends users_base.html); mike says he's refactoring all of this imminently, so # i will not worry about it until he is done # now we fake a post # Models and UI are both written to support multiple products, # but for now, each subscription can only have one product. Returns the total balance of unpaid, unhidden invoices. Doesn't take into account the view settings on the page. Returns a StripePaymentHandler object # set the license of every multimedia file that doesn't yet have a license set # cda stuff. In order to publish a snapshot, a user must have agreed to this # Initiate domain transfer # Do not want to send them back to the activate page View for dealing with a single Credit Card View for dealing Credit Cards
1.249772
1
src/anmi/T2/funcs_met_iters.py
alexmascension/ANMI
1
9815
<filename>src/anmi/T2/funcs_met_iters.py from sympy import simplify, zeros from sympy import Matrix as mat import numpy as np from ..genericas import print_verbose, matriz_inversa def criterio_radio_espectral(H, verbose=True): eigs = [simplify(i) for i in list(H.eigenvals().keys())] print_verbose("||Criterio de radio espectral||", verbose) try: print_verbose( f"El mayor autovalor es {np.max(np.array(eigs, dtype=float))}. Si ese valor es < 1 entonces los métodos iterativos convergen.", verbose, ) except: print_verbose( f"Los autovalores son {eigs}. Si el mayor autovalor es < 1, entonces el método converge.", verbose, ) def criterio_diagonal_dominante(A, verbose=True): print_verbose( "||Criterio de Diagonal Dominante||\n Si la matriz es dominante por filas, los métodos de Jacobi y Gauss-Seidel convergen.", verbose, ) A_abs = abs(A) try: np.array(A_abs, dtype=float) for r in range(A.shape[0]): diff = 2 * A_abs[r, r] - sum(A_abs[r, :]) if diff <= 0: print_verbose( f"La fila {r} NO es dominante por filas: diff = {diff}.", verbose ) return print_verbose("La matriz CUMPLE EL CRITERIO DIAGONAL DOMINANTE", verbose) except: print_verbose( "La matriz tiene complejos o simbolos. 
Hay que verificar el criterio a mano.", verbose, ) def criterio_simetrica_definida_positiva(A, verbose=True): print_verbose( "||Criterio de Sim Def Pos||\n Si la matriz es simétrica y definida positiva, el método de Gauss-Seidel es convergente.", verbose, ) if A != A.T: print_verbose("La matriz NO es simétrica.", verbose) return det_A = A.det() print_verbose(f"El determinante de A es {det_A}.", verbose) try: if float(det_A) > 0: print_verbose( "La matriz es DEFINIDA POSITIVA (el determinante es positivo).", verbose, ) print_verbose("La matriz CUMPLE EL CRITERIO SIM DEF POS", verbose) else: print_verbose( "La matriz NO es DEFINIDA POSITIVA (el determinante no es positivo).", verbose, ) except: print_verbose( "No podemos determinar la positividad porque hay símbolos o complejos.", verbose, ) def criterio_SOR(verbose): print_verbose( "||Criterio SOR||\n Si la matriz es simétrica y definida positiva y w in (0, 2) el método SOR es convergente.\nSi w no (0, 2) el método SOR no converge.", verbose, ) def criterio_m_matriz(A, verbose): print_verbose( "||Criterio M matriz||\n Si la A es M-matriz entonces las descomposiciones de Jacobi y Gauss-Seidel son convergentes.\nA^-1 >= 0\naij < 0 para todo i =/= j", verbose, ) A_inv = matriz_inversa(A) try: np.array(A, dtype=float) if np.min(A_inv) >= 0: print_verbose("A^-1 >= 0", verbose) else: print_verbose("A^-1 < 0. La matriz NO CUMPLE el criterio", verbose) A_null_diag = A.copy() for i in range(A.shape[0]): A_null_diag[i, i] = 0 if np.max(A_null_diag) > 0: print_verbose( "La matriz tiene elementos no diagonales positivos. 
NO CUMPLE el criterio.", verbose, ) else: print_verbose("Los elementos no diagonales son negativos.", verbose) except: print_verbose( "La matriz tiene complejos o símbolos, no podemos verificar le criterio.", verbose, ) def metodo_iterativo( A, b=None, x0=None, metodo="jacobi", w=1.5, n_iter=10, verbose=True, ): """Aplica el método iterativo designado Args: A (matriz): Matriz de valores b (vector, optional): Vector de rhs. Por defecto es 1, 1, ..., 1. x0 (vector, optional): Vector con elementos de la primera iteración. Por defecto es 1, 1, ..., 1. metodo (str, optional): método de resolución, puede ser "jacobi", "gs" o "sor". w (float, optional): Peso para método sor. Defaults to 1.5. n_iter (int, optional): Número de iteraciones del método. Defaults to 10. verbose (bool, optional): Imprime resultados intermedios. Defaults to True. Returns: dict: 'x': vector de resultados para Ax=b, 'diff': diferencia entre Ax y b para cada iteración. """ if b is None: b = mat([[1] * A.shape[0]]).T if x0 is None: x0 = mat([[1] * A.shape[1]]).T D, L, U = ( zeros(A.shape[0], A.shape[1]), zeros(A.shape[0], A.shape[1]), zeros(A.shape[0], A.shape[1]), ) for r in range(A.shape[0]): for c in range(A.shape[1]): if r == c: D[r, c] = A[r, c] elif r < c: U[r, c] = -A[r, c] else: L[r, c] = -A[r, c] if metodo == "jacobi": M = D elif metodo == "gs": M = D - L elif metodo == "sor": M = D / w - L N = simplify(M - A) # Aplicamos criterios! criterio_radio_espectral(matriz_inversa(M) * N, verbose) criterio_diagonal_dominante(A, verbose) criterio_simetrica_definida_positiva(A, verbose) criterio_SOR(verbose) criterio_m_matriz(A, verbose) diff = [] for iter in range(n_iter): # Aplica el método x0 = (matriz_inversa(M)) * (N * x0 + b) diff.append(np.sum(np.abs(A * x0 - b))) return {"x": x0, "diff": diff}
<filename>src/anmi/T2/funcs_met_iters.py from sympy import simplify, zeros from sympy import Matrix as mat import numpy as np from ..genericas import print_verbose, matriz_inversa def criterio_radio_espectral(H, verbose=True): eigs = [simplify(i) for i in list(H.eigenvals().keys())] print_verbose("||Criterio de radio espectral||", verbose) try: print_verbose( f"El mayor autovalor es {np.max(np.array(eigs, dtype=float))}. Si ese valor es < 1 entonces los métodos iterativos convergen.", verbose, ) except: print_verbose( f"Los autovalores son {eigs}. Si el mayor autovalor es < 1, entonces el método converge.", verbose, ) def criterio_diagonal_dominante(A, verbose=True): print_verbose( "||Criterio de Diagonal Dominante||\n Si la matriz es dominante por filas, los métodos de Jacobi y Gauss-Seidel convergen.", verbose, ) A_abs = abs(A) try: np.array(A_abs, dtype=float) for r in range(A.shape[0]): diff = 2 * A_abs[r, r] - sum(A_abs[r, :]) if diff <= 0: print_verbose( f"La fila {r} NO es dominante por filas: diff = {diff}.", verbose ) return print_verbose("La matriz CUMPLE EL CRITERIO DIAGONAL DOMINANTE", verbose) except: print_verbose( "La matriz tiene complejos o simbolos. 
Hay que verificar el criterio a mano.", verbose, ) def criterio_simetrica_definida_positiva(A, verbose=True): print_verbose( "||Criterio de Sim Def Pos||\n Si la matriz es simétrica y definida positiva, el método de Gauss-Seidel es convergente.", verbose, ) if A != A.T: print_verbose("La matriz NO es simétrica.", verbose) return det_A = A.det() print_verbose(f"El determinante de A es {det_A}.", verbose) try: if float(det_A) > 0: print_verbose( "La matriz es DEFINIDA POSITIVA (el determinante es positivo).", verbose, ) print_verbose("La matriz CUMPLE EL CRITERIO SIM DEF POS", verbose) else: print_verbose( "La matriz NO es DEFINIDA POSITIVA (el determinante no es positivo).", verbose, ) except: print_verbose( "No podemos determinar la positividad porque hay símbolos o complejos.", verbose, ) def criterio_SOR(verbose): print_verbose( "||Criterio SOR||\n Si la matriz es simétrica y definida positiva y w in (0, 2) el método SOR es convergente.\nSi w no (0, 2) el método SOR no converge.", verbose, ) def criterio_m_matriz(A, verbose): print_verbose( "||Criterio M matriz||\n Si la A es M-matriz entonces las descomposiciones de Jacobi y Gauss-Seidel son convergentes.\nA^-1 >= 0\naij < 0 para todo i =/= j", verbose, ) A_inv = matriz_inversa(A) try: np.array(A, dtype=float) if np.min(A_inv) >= 0: print_verbose("A^-1 >= 0", verbose) else: print_verbose("A^-1 < 0. La matriz NO CUMPLE el criterio", verbose) A_null_diag = A.copy() for i in range(A.shape[0]): A_null_diag[i, i] = 0 if np.max(A_null_diag) > 0: print_verbose( "La matriz tiene elementos no diagonales positivos. 
NO CUMPLE el criterio.", verbose, ) else: print_verbose("Los elementos no diagonales son negativos.", verbose) except: print_verbose( "La matriz tiene complejos o símbolos, no podemos verificar le criterio.", verbose, ) def metodo_iterativo( A, b=None, x0=None, metodo="jacobi", w=1.5, n_iter=10, verbose=True, ): """Aplica el método iterativo designado Args: A (matriz): Matriz de valores b (vector, optional): Vector de rhs. Por defecto es 1, 1, ..., 1. x0 (vector, optional): Vector con elementos de la primera iteración. Por defecto es 1, 1, ..., 1. metodo (str, optional): método de resolución, puede ser "jacobi", "gs" o "sor". w (float, optional): Peso para método sor. Defaults to 1.5. n_iter (int, optional): Número de iteraciones del método. Defaults to 10. verbose (bool, optional): Imprime resultados intermedios. Defaults to True. Returns: dict: 'x': vector de resultados para Ax=b, 'diff': diferencia entre Ax y b para cada iteración. """ if b is None: b = mat([[1] * A.shape[0]]).T if x0 is None: x0 = mat([[1] * A.shape[1]]).T D, L, U = ( zeros(A.shape[0], A.shape[1]), zeros(A.shape[0], A.shape[1]), zeros(A.shape[0], A.shape[1]), ) for r in range(A.shape[0]): for c in range(A.shape[1]): if r == c: D[r, c] = A[r, c] elif r < c: U[r, c] = -A[r, c] else: L[r, c] = -A[r, c] if metodo == "jacobi": M = D elif metodo == "gs": M = D - L elif metodo == "sor": M = D / w - L N = simplify(M - A) # Aplicamos criterios! criterio_radio_espectral(matriz_inversa(M) * N, verbose) criterio_diagonal_dominante(A, verbose) criterio_simetrica_definida_positiva(A, verbose) criterio_SOR(verbose) criterio_m_matriz(A, verbose) diff = [] for iter in range(n_iter): # Aplica el método x0 = (matriz_inversa(M)) * (N * x0 + b) diff.append(np.sum(np.abs(A * x0 - b))) return {"x": x0, "diff": diff}
es
0.733967
Aplica el método iterativo designado Args: A (matriz): Matriz de valores b (vector, optional): Vector de rhs. Por defecto es 1, 1, ..., 1. x0 (vector, optional): Vector con elementos de la primera iteración. Por defecto es 1, 1, ..., 1. metodo (str, optional): método de resolución, puede ser "jacobi", "gs" o "sor". w (float, optional): Peso para método sor. Defaults to 1.5. n_iter (int, optional): Número de iteraciones del método. Defaults to 10. verbose (bool, optional): Imprime resultados intermedios. Defaults to True. Returns: dict: 'x': vector de resultados para Ax=b, 'diff': diferencia entre Ax y b para cada iteración. # Aplicamos criterios! # Aplica el método
2.649837
3
{{cookiecutter.project_hyphen}}/{{cookiecutter.project_slug}}/__init__.py
zhangxianbing/cookiecutter-pypackage
1
9816
"""{{ cookiecutter.project_name }} - {{ cookiecutter.project_short_description }}""" __version__ = "{{ cookiecutter.project_version }}" __author__ = """{{ cookiecutter.author_name }}""" __email__ = "{{ cookiecutter.author_email }}" prog_name = "{{ cookiecutter.project_hyphen }}"
"""{{ cookiecutter.project_name }} - {{ cookiecutter.project_short_description }}""" __version__ = "{{ cookiecutter.project_version }}" __author__ = """{{ cookiecutter.author_name }}""" __email__ = "{{ cookiecutter.author_email }}" prog_name = "{{ cookiecutter.project_hyphen }}"
en
0.634386
{{ cookiecutter.project_name }} - {{ cookiecutter.project_short_description }} {{ cookiecutter.author_name }}
1.188211
1
services/osparc-gateway-server/tests/integration/_dask_helpers.py
mguidon/osparc-dask-gateway
1
9817
from typing import NamedTuple from dask_gateway_server.app import DaskGateway class DaskGatewayServer(NamedTuple): address: str proxy_address: str password: str server: DaskGateway
from typing import NamedTuple from dask_gateway_server.app import DaskGateway class DaskGatewayServer(NamedTuple): address: str proxy_address: str password: str server: DaskGateway
none
1
1.664768
2
rdkit/ML/InfoTheory/BitRank.py
kazuyaujihara/rdkit
1,609
9818
<filename>rdkit/ML/InfoTheory/BitRank.py # # Copyright (C) 2001,2002,2003 <NAME> and Rational Discovery LLC # """ Functionality for ranking bits using info gains **Definitions used in this module** - *sequence*: an object capable of containing other objects which supports __getitem__() and __len__(). Examples of these include lists, tuples, and Numeric arrays. - *IntVector*: an object containing integers which supports __getitem__() and __len__(). Examples include lists, tuples, Numeric Arrays, and BitVects. **NOTE**: Neither *sequences* nor *IntVectors* need to support item assignment. It is perfectly acceptable for them to be read-only, so long as they are random-access. """ import numpy from rdkit.ML.InfoTheory import entropy def FormCounts(bitVects, actVals, whichBit, nPossibleActs, nPossibleBitVals=2): """ generates the counts matrix for a particular bit **Arguments** - bitVects: a *sequence* containing *IntVectors* - actVals: a *sequence* - whichBit: an integer, the bit number to use. - nPossibleActs: the (integer) number of possible activity values. - nPossibleBitVals: (optional) if specified, this integer provides the maximum value attainable by the (increasingly inaccurately named) bits in _bitVects_. **Returns** a Numeric array with the counts **Notes** This is really intended for internal use. """ if len(bitVects) != len(actVals): raise ValueError('var and activity lists should be the same length') res = numpy.zeros((nPossibleBitVals, nPossibleActs), numpy.integer) for i in range(len(bitVects)): res[bitVects[i][whichBit], actVals[i]] += 1 return res def CalcInfoGains(bitVects, actVals, nPossibleActs, nPossibleBitVals=2): """ Calculates the information gain for a set of points and activity values **Arguments** - bitVects: a *sequence* containing *IntVectors* - actVals: a *sequence* - nPossibleActs: the (integer) number of possible activity values. 
- nPossibleBitVals: (optional) if specified, this integer provides the maximum value attainable by the (increasingly inaccurately named) bits in _bitVects_. **Returns** a list of floats """ if len(bitVects) != len(actVals): raise ValueError('var and activity lists should be the same length') nBits = len(bitVects[0]) res = numpy.zeros(nBits, numpy.float) for bit in range(nBits): counts = FormCounts(bitVects, actVals, bit, nPossibleActs, nPossibleBitVals=nPossibleBitVals) res[bit] = entropy.InfoGain(counts) return res def RankBits(bitVects, actVals, nPossibleBitVals=2, metricFunc=CalcInfoGains): """ Rank a set of bits according to a metric function **Arguments** - bitVects: a *sequence* containing *IntVectors* - actVals: a *sequence* - nPossibleBitVals: (optional) if specified, this integer provides the maximum value attainable by the (increasingly inaccurately named) bits in _bitVects_. - metricFunc: (optional) the metric function to be used. See _CalcInfoGains()_ for a description of the signature of this function. 
**Returns** A 2-tuple containing: - the relative order of the bits (a list of ints) - the metric calculated for each bit (a list of floats) """ nPossibleActs = max(actVals) + 1 metrics = metricFunc(bitVects, actVals, nPossibleActs, nPossibleBitVals=nPossibleBitVals) bitOrder = list(numpy.argsort(metrics)) bitOrder.reverse() return bitOrder, metrics def AnalyzeSparseVects(bitVects, actVals): """ #DOC **Arguments** - bitVects: a *sequence* containing SBVs - actVals: a *sequence* **Returns** a list of floats **Notes** - these need to be bit vects and binary activities """ nPts = len(bitVects) if nPts != len(actVals): raise ValueError('var and activity lists should be the same length') nBits = bitVects[0].GetSize() actives = numpy.zeros(nBits, numpy.integer) inactives = numpy.zeros(nBits, numpy.integer) nActives, nInactives = 0, 0 for i in range(nPts): sig, act = bitVects[i], actVals[i] onBitList = sig.GetOnBits() if act: for bit in onBitList: actives[bit] += 1 nActives += 1 else: for bit in onBitList: inactives[bit] += 1 nInactives += 1 resTbl = numpy.zeros((2, 2), numpy.integer) res = [] gains = [] for bit in range(nBits): nAct, nInact = actives[bit], inactives[bit] if nAct or nInact: resTbl[0, 0] = nAct resTbl[1, 0] = nPts - nAct resTbl[0, 1] = nInact resTbl[1, 1] = nPts - nInact gain = entropy.InfoGain(resTbl) gains.append(gain) res.append((bit, gain, nAct, nInact)) return res, gains def SparseRankBits(bitVects, actVals, metricFunc=AnalyzeSparseVects): """ Rank a set of bits according to a metric function **Arguments** - bitVects: a *sequence* containing SBVs - actVals: a *sequence* - metricFunc: (optional) the metric function to be used. See _SparseCalcInfoGains()_ for a description of the signature of this function. 
**Returns** A 2-tuple containing: - the relative order of the bits (a list of ints) - the metric calculated for each bit (a list of floats) **Notes** - these need to be bit vects and binary activities """ info, metrics = metricFunc(bitVects, actVals) bitOrder = list(numpy.argsort(metrics)) bitOrder.reverse() return bitOrder, info
<filename>rdkit/ML/InfoTheory/BitRank.py # # Copyright (C) 2001,2002,2003 <NAME> and Rational Discovery LLC # """ Functionality for ranking bits using info gains **Definitions used in this module** - *sequence*: an object capable of containing other objects which supports __getitem__() and __len__(). Examples of these include lists, tuples, and Numeric arrays. - *IntVector*: an object containing integers which supports __getitem__() and __len__(). Examples include lists, tuples, Numeric Arrays, and BitVects. **NOTE**: Neither *sequences* nor *IntVectors* need to support item assignment. It is perfectly acceptable for them to be read-only, so long as they are random-access. """ import numpy from rdkit.ML.InfoTheory import entropy def FormCounts(bitVects, actVals, whichBit, nPossibleActs, nPossibleBitVals=2): """ generates the counts matrix for a particular bit **Arguments** - bitVects: a *sequence* containing *IntVectors* - actVals: a *sequence* - whichBit: an integer, the bit number to use. - nPossibleActs: the (integer) number of possible activity values. - nPossibleBitVals: (optional) if specified, this integer provides the maximum value attainable by the (increasingly inaccurately named) bits in _bitVects_. **Returns** a Numeric array with the counts **Notes** This is really intended for internal use. """ if len(bitVects) != len(actVals): raise ValueError('var and activity lists should be the same length') res = numpy.zeros((nPossibleBitVals, nPossibleActs), numpy.integer) for i in range(len(bitVects)): res[bitVects[i][whichBit], actVals[i]] += 1 return res def CalcInfoGains(bitVects, actVals, nPossibleActs, nPossibleBitVals=2): """ Calculates the information gain for a set of points and activity values **Arguments** - bitVects: a *sequence* containing *IntVectors* - actVals: a *sequence* - nPossibleActs: the (integer) number of possible activity values. 
- nPossibleBitVals: (optional) if specified, this integer provides the maximum value attainable by the (increasingly inaccurately named) bits in _bitVects_. **Returns** a list of floats """ if len(bitVects) != len(actVals): raise ValueError('var and activity lists should be the same length') nBits = len(bitVects[0]) res = numpy.zeros(nBits, numpy.float) for bit in range(nBits): counts = FormCounts(bitVects, actVals, bit, nPossibleActs, nPossibleBitVals=nPossibleBitVals) res[bit] = entropy.InfoGain(counts) return res def RankBits(bitVects, actVals, nPossibleBitVals=2, metricFunc=CalcInfoGains): """ Rank a set of bits according to a metric function **Arguments** - bitVects: a *sequence* containing *IntVectors* - actVals: a *sequence* - nPossibleBitVals: (optional) if specified, this integer provides the maximum value attainable by the (increasingly inaccurately named) bits in _bitVects_. - metricFunc: (optional) the metric function to be used. See _CalcInfoGains()_ for a description of the signature of this function. 
**Returns** A 2-tuple containing: - the relative order of the bits (a list of ints) - the metric calculated for each bit (a list of floats) """ nPossibleActs = max(actVals) + 1 metrics = metricFunc(bitVects, actVals, nPossibleActs, nPossibleBitVals=nPossibleBitVals) bitOrder = list(numpy.argsort(metrics)) bitOrder.reverse() return bitOrder, metrics def AnalyzeSparseVects(bitVects, actVals): """ #DOC **Arguments** - bitVects: a *sequence* containing SBVs - actVals: a *sequence* **Returns** a list of floats **Notes** - these need to be bit vects and binary activities """ nPts = len(bitVects) if nPts != len(actVals): raise ValueError('var and activity lists should be the same length') nBits = bitVects[0].GetSize() actives = numpy.zeros(nBits, numpy.integer) inactives = numpy.zeros(nBits, numpy.integer) nActives, nInactives = 0, 0 for i in range(nPts): sig, act = bitVects[i], actVals[i] onBitList = sig.GetOnBits() if act: for bit in onBitList: actives[bit] += 1 nActives += 1 else: for bit in onBitList: inactives[bit] += 1 nInactives += 1 resTbl = numpy.zeros((2, 2), numpy.integer) res = [] gains = [] for bit in range(nBits): nAct, nInact = actives[bit], inactives[bit] if nAct or nInact: resTbl[0, 0] = nAct resTbl[1, 0] = nPts - nAct resTbl[0, 1] = nInact resTbl[1, 1] = nPts - nInact gain = entropy.InfoGain(resTbl) gains.append(gain) res.append((bit, gain, nAct, nInact)) return res, gains def SparseRankBits(bitVects, actVals, metricFunc=AnalyzeSparseVects): """ Rank a set of bits according to a metric function **Arguments** - bitVects: a *sequence* containing SBVs - actVals: a *sequence* - metricFunc: (optional) the metric function to be used. See _SparseCalcInfoGains()_ for a description of the signature of this function. 
**Returns** A 2-tuple containing: - the relative order of the bits (a list of ints) - the metric calculated for each bit (a list of floats) **Notes** - these need to be bit vects and binary activities """ info, metrics = metricFunc(bitVects, actVals) bitOrder = list(numpy.argsort(metrics)) bitOrder.reverse() return bitOrder, info
en
0.678291
# # Copyright (C) 2001,2002,2003 <NAME> and Rational Discovery LLC # Functionality for ranking bits using info gains **Definitions used in this module** - *sequence*: an object capable of containing other objects which supports __getitem__() and __len__(). Examples of these include lists, tuples, and Numeric arrays. - *IntVector*: an object containing integers which supports __getitem__() and __len__(). Examples include lists, tuples, Numeric Arrays, and BitVects. **NOTE**: Neither *sequences* nor *IntVectors* need to support item assignment. It is perfectly acceptable for them to be read-only, so long as they are random-access. generates the counts matrix for a particular bit **Arguments** - bitVects: a *sequence* containing *IntVectors* - actVals: a *sequence* - whichBit: an integer, the bit number to use. - nPossibleActs: the (integer) number of possible activity values. - nPossibleBitVals: (optional) if specified, this integer provides the maximum value attainable by the (increasingly inaccurately named) bits in _bitVects_. **Returns** a Numeric array with the counts **Notes** This is really intended for internal use. Calculates the information gain for a set of points and activity values **Arguments** - bitVects: a *sequence* containing *IntVectors* - actVals: a *sequence* - nPossibleActs: the (integer) number of possible activity values. - nPossibleBitVals: (optional) if specified, this integer provides the maximum value attainable by the (increasingly inaccurately named) bits in _bitVects_. **Returns** a list of floats Rank a set of bits according to a metric function **Arguments** - bitVects: a *sequence* containing *IntVectors* - actVals: a *sequence* - nPossibleBitVals: (optional) if specified, this integer provides the maximum value attainable by the (increasingly inaccurately named) bits in _bitVects_. - metricFunc: (optional) the metric function to be used. See _CalcInfoGains()_ for a description of the signature of this function. 
**Returns** A 2-tuple containing: - the relative order of the bits (a list of ints) - the metric calculated for each bit (a list of floats) #DOC **Arguments** - bitVects: a *sequence* containing SBVs - actVals: a *sequence* **Returns** a list of floats **Notes** - these need to be bit vects and binary activities Rank a set of bits according to a metric function **Arguments** - bitVects: a *sequence* containing SBVs - actVals: a *sequence* - metricFunc: (optional) the metric function to be used. See _SparseCalcInfoGains()_ for a description of the signature of this function. **Returns** A 2-tuple containing: - the relative order of the bits (a list of ints) - the metric calculated for each bit (a list of floats) **Notes** - these need to be bit vects and binary activities
2.694171
3
trainer/dataset.py
vinay-swamy/gMVP
2
9819
import tensorflow as tf import os import pickle import numpy as np from constant_params import input_feature_dim, window_size def build_dataset(input_tfrecord_files, batch_size): drop_remainder = False feature_description = { 'label': tf.io.FixedLenFeature([], tf.int64), 'ref_aa': tf.io.FixedLenFeature([], tf.int64), 'alt_aa': tf.io.FixedLenFeature([], tf.int64), 'feature': tf.io.FixedLenFeature([], tf.string), 'mask': tf.io.FixedLenFeature([], tf.string), 'var_id': tf.io.FixedLenFeature([], tf.string), } def _parser(example_proto): parsed = tf.io.parse_single_example(example_proto, feature_description) label, ref_aa, alt_aa = parsed['label'], parsed['ref_aa'], parsed[ 'alt_aa'] var_id = parsed['var_id'] ref_aa, alt_aa, label = tf.cast(ref_aa, tf.int32), tf.cast( alt_aa, tf.int32), tf.cast(label, tf.float32) feature = tf.io.decode_raw(parsed['feature'], tf.float32) feature = tf.reshape(feature, (window_size, input_feature_dim)) mask = tf.io.decode_raw(parsed['mask'], tf.float32) mask = tf.reshape(mask, (window_size, )) h = window_size // 2 #mask the postion of interest mask = tf.concat( [mask[:h], tf.cast([ 1, ], dtype=tf.float32), mask[h + 1:]], axis=-1) ''' pos_encoding = 1.0 + tf.cast( tf.math.abs(window_size // 2 - tf.range(window_size)), dtype=tf.float32) #pos_encoding = tf.math.log() / tf.math.log(2.0) feature = tf.concat([feature, pos_encoding[:, tf.newaxis]], axis=-1) ''' return var_id, ref_aa, alt_aa, feature, label, mask dataset = tf.data.TFRecordDataset(input_tfrecord_files) options = tf.data.Options() options.experimental_threading.max_intra_op_parallelism = 1 dataset = dataset.with_options(options) dataset = dataset.shuffle(2048) dataset = dataset.map(_parser, num_parallel_calls=8) dataset = dataset.batch(batch_size) #dataset = dataset.prefetch(4) return dataset def build_all_possible_missenses_dataset(tr_list, feature_dir, batch_size): amino_acid_order = 'ACDEFGHIKLMNPQRSTVWY*' def _gen_data(): for transcript_id in tr_list: feature_path = 
f'{feature_dir}/{transcript_id}.pickle' if not os.path.exists(feature_path): continue print(feature_path, flush=True) with open(feature_path, 'rb') as fr: feature = pickle.load(fr) L = feature.shape[0] w = window_size // 2 for aa_pos in range(L): ref_aa = int(feature[aa_pos, 0]) start = max(aa_pos - w, 0) end = min(L, aa_pos + 1 + w) var_start = start - (aa_pos - w) var_end = var_start + (end - start) var_feature = np.zeros([w * 2 + 1, feature.shape[1]]) var_feature[var_start:var_end] = feature[start:end] mask = np.ones((w * 2 + 1, ), dtype=np.float32) mask[var_start:var_end] = 0.0 mask[w] = 1.0 for alt_aa in range(20): var_id = f'{transcript_id}_{str(aa_pos+1)}_{amino_acid_order[ref_aa]}_{amino_acid_order[alt_aa]}'.encode( 'utf-8') yield var_id, np.int32(ref_aa), np.int32( alt_aa), np.float32(var_feature), np.float32(mask) dataset = tf.data.Dataset.from_generator( _gen_data, (tf.string, tf.int32, tf.int32, tf.float32, tf.float32), (tf.TensorShape(()), tf.TensorShape(()), tf.TensorShape( ()), tf.TensorShape((window_size, input_feature_dim)), tf.TensorShape((window_size, )))) options = tf.data.Options() options.experimental_threading.max_intra_op_parallelism = 1 dataset = dataset.with_options(options) #dataset = dataset.map(_parser, num_parallel_calls=8) dataset = dataset.batch(batch_size) dataset = dataset.prefetch(4) return dataset def build_test_dataset(input_tfrecord_files, batch_size): drop_remainder = False feature_description = { 'ref_aa': tf.io.FixedLenFeature([], tf.int64), 'alt_aa': tf.io.FixedLenFeature([], tf.int64), 'feature': tf.io.FixedLenFeature([], tf.string), 'mask': tf.io.FixedLenFeature([], tf.string), 'var_id': tf.io.FixedLenFeature([], tf.string), } def _parser(example_proto): parsed = tf.io.parse_single_example(example_proto, feature_description) ref_aa, alt_aa = parsed['ref_aa'], parsed['alt_aa'] var_id = parsed['var_id'] ref_aa, alt_aa = tf.cast(ref_aa, tf.int32), tf.cast(alt_aa, tf.int32) feature = tf.io.decode_raw(parsed['feature'], 
tf.float32) feature = tf.reshape(feature, (window_size, input_feature_dim)) mask = tf.io.decode_raw(parsed['mask'], tf.float32) mask = tf.reshape(mask, (window_size, )) h = window_size // 2 #mask the postion of interest mask = tf.concat( [mask[:h], tf.cast([ 1, ], dtype=tf.float32), mask[h + 1:]], axis=-1) return var_id, ref_aa, alt_aa, feature, mask dataset = tf.data.TFRecordDataset(input_tfrecord_files) options = tf.data.Options() options.experimental_threading.max_intra_op_parallelism = 1 dataset = dataset.with_options(options) dataset = dataset.map(_parser, num_parallel_calls=8) dataset = dataset.batch(batch_size) #dataset = dataset.prefetch(4) return dataset
import tensorflow as tf
import os
import pickle
import numpy as np

from constant_params import input_feature_dim, window_size


def build_dataset(input_tfrecord_files, batch_size):
    """Build the shuffled training dataset from TFRecord files.

    Each serialized example carries a (label, ref_aa, alt_aa, feature, mask,
    var_id) record.  ``feature`` and ``mask`` are raw float32 buffers that are
    reshaped to ``(window_size, input_feature_dim)`` and ``(window_size,)``.

    :param input_tfrecord_files: One or more TFRecord file paths.
    :param batch_size: Batch size of the returned dataset.
    :return: A shuffled, batched ``tf.data.Dataset`` yielding
        ``(var_id, ref_aa, alt_aa, feature, label, mask)`` tuples.
    """
    feature_description = {
        'label': tf.io.FixedLenFeature([], tf.int64),
        'ref_aa': tf.io.FixedLenFeature([], tf.int64),
        'alt_aa': tf.io.FixedLenFeature([], tf.int64),
        'feature': tf.io.FixedLenFeature([], tf.string),
        'mask': tf.io.FixedLenFeature([], tf.string),
        'var_id': tf.io.FixedLenFeature([], tf.string),
    }

    def _parser(example_proto):
        # Decode one serialized example into typed tensors.
        parsed = tf.io.parse_single_example(example_proto, feature_description)
        label, ref_aa, alt_aa = parsed['label'], parsed['ref_aa'], parsed['alt_aa']
        var_id = parsed['var_id']
        ref_aa, alt_aa, label = tf.cast(ref_aa, tf.int32), tf.cast(
            alt_aa, tf.int32), tf.cast(label, tf.float32)
        feature = tf.io.decode_raw(parsed['feature'], tf.float32)
        feature = tf.reshape(feature, (window_size, input_feature_dim))
        mask = tf.io.decode_raw(parsed['mask'], tf.float32)
        mask = tf.reshape(mask, (window_size, ))
        h = window_size // 2
        # Force the mask at the window centre (the position of interest) to 1.
        mask = tf.concat(
            [mask[:h], tf.cast([1, ], dtype=tf.float32), mask[h + 1:]],
            axis=-1)
        return var_id, ref_aa, alt_aa, feature, label, mask

    dataset = tf.data.TFRecordDataset(input_tfrecord_files)
    options = tf.data.Options()
    # Limit intra-op parallelism so parsing threads do not oversubscribe CPUs.
    options.experimental_threading.max_intra_op_parallelism = 1
    dataset = dataset.with_options(options)
    dataset = dataset.shuffle(2048)
    dataset = dataset.map(_parser, num_parallel_calls=8)
    dataset = dataset.batch(batch_size)
    return dataset


def build_all_possible_missenses_dataset(tr_list, feature_dir, batch_size):
    """Enumerate every possible missense variant for a list of transcripts.

    For each transcript with a pickled per-residue feature matrix (assumed
    shape ``[L, F]`` with column 0 holding the reference amino-acid index --
    TODO confirm against the feature-generation code), a window of width
    ``window_size`` is cut around every residue and all 20 alternate amino
    acids are emitted.

    :param tr_list: Iterable of transcript ids.
    :param feature_dir: Directory containing ``<transcript_id>.pickle`` files.
    :param batch_size: Batch size of the returned dataset.
    :return: A batched ``tf.data.Dataset`` yielding
        ``(var_id, ref_aa, alt_aa, feature, mask)`` tuples.
    """
    amino_acid_order = 'ACDEFGHIKLMNPQRSTVWY*'

    def _gen_data():
        for transcript_id in tr_list:
            feature_path = f'{feature_dir}/{transcript_id}.pickle'
            if not os.path.exists(feature_path):
                continue
            # Progress logging; flushed so it is visible under buffered output.
            print(feature_path, flush=True)
            with open(feature_path, 'rb') as fr:
                feature = pickle.load(fr)

            L = feature.shape[0]
            w = window_size // 2
            for aa_pos in range(L):
                ref_aa = int(feature[aa_pos, 0])
                # Clip the window at the sequence boundaries, then place the
                # valid slice into a zero-padded fixed-size window.
                start = max(aa_pos - w, 0)
                end = min(L, aa_pos + 1 + w)
                var_start = start - (aa_pos - w)
                var_end = var_start + (end - start)

                var_feature = np.zeros([w * 2 + 1, feature.shape[1]])
                var_feature[var_start:var_end] = feature[start:end]

                # mask==1 marks padding; the centre position is always 1.
                mask = np.ones((w * 2 + 1, ), dtype=np.float32)
                mask[var_start:var_end] = 0.0
                mask[w] = 1.0

                for alt_aa in range(20):
                    var_id = f'{transcript_id}_{str(aa_pos+1)}_{amino_acid_order[ref_aa]}_{amino_acid_order[alt_aa]}'.encode(
                        'utf-8')
                    yield var_id, np.int32(ref_aa), np.int32(
                        alt_aa), np.float32(var_feature), np.float32(mask)

    dataset = tf.data.Dataset.from_generator(
        _gen_data,
        (tf.string, tf.int32, tf.int32, tf.float32, tf.float32),
        (tf.TensorShape(()), tf.TensorShape(()), tf.TensorShape(
            ()), tf.TensorShape((window_size, input_feature_dim)),
         tf.TensorShape((window_size, ))))

    options = tf.data.Options()
    options.experimental_threading.max_intra_op_parallelism = 1
    dataset = dataset.with_options(options)
    dataset = dataset.batch(batch_size)
    dataset = dataset.prefetch(4)

    return dataset


def build_test_dataset(input_tfrecord_files, batch_size):
    """Build the evaluation dataset from TFRecord files.

    Identical to :func:`build_dataset` except that records carry no label and
    the dataset is neither shuffled nor labelled in the output tuple.

    :param input_tfrecord_files: One or more TFRecord file paths.
    :param batch_size: Batch size of the returned dataset.
    :return: A batched ``tf.data.Dataset`` yielding
        ``(var_id, ref_aa, alt_aa, feature, mask)`` tuples.
    """
    feature_description = {
        'ref_aa': tf.io.FixedLenFeature([], tf.int64),
        'alt_aa': tf.io.FixedLenFeature([], tf.int64),
        'feature': tf.io.FixedLenFeature([], tf.string),
        'mask': tf.io.FixedLenFeature([], tf.string),
        'var_id': tf.io.FixedLenFeature([], tf.string),
    }

    def _parser(example_proto):
        parsed = tf.io.parse_single_example(example_proto, feature_description)
        ref_aa, alt_aa = parsed['ref_aa'], parsed['alt_aa']
        var_id = parsed['var_id']
        ref_aa, alt_aa = tf.cast(ref_aa, tf.int32), tf.cast(alt_aa, tf.int32)
        feature = tf.io.decode_raw(parsed['feature'], tf.float32)
        feature = tf.reshape(feature, (window_size, input_feature_dim))
        mask = tf.io.decode_raw(parsed['mask'], tf.float32)
        mask = tf.reshape(mask, (window_size, ))
        h = window_size // 2
        # Force the mask at the window centre (the position of interest) to 1.
        mask = tf.concat(
            [mask[:h], tf.cast([1, ], dtype=tf.float32), mask[h + 1:]],
            axis=-1)
        return var_id, ref_aa, alt_aa, feature, mask

    dataset = tf.data.TFRecordDataset(input_tfrecord_files)
    options = tf.data.Options()
    options.experimental_threading.max_intra_op_parallelism = 1
    dataset = dataset.with_options(options)
    dataset = dataset.map(_parser, num_parallel_calls=8)
    dataset = dataset.batch(batch_size)
    return dataset
en
0.167975
#mask the postion of interest pos_encoding = 1.0 + tf.cast( tf.math.abs(window_size // 2 - tf.range(window_size)), dtype=tf.float32) #pos_encoding = tf.math.log() / tf.math.log(2.0) feature = tf.concat([feature, pos_encoding[:, tf.newaxis]], axis=-1) #dataset = dataset.prefetch(4) #dataset = dataset.map(_parser, num_parallel_calls=8) #mask the postion of interest #dataset = dataset.prefetch(4)
2.293552
2
layers/eight_mile/pytorch/layers.py
dpressel/baseline
241
9820
<gh_stars>100-1000 import copy import math import logging from typing import Dict, List, Optional, Tuple, Union import os import numpy as np import torch import torch.nn as nn import torch.nn.functional as F import torch.jit as jit import torch.autograd import contextlib import glob from eight_mile.utils import listify, Offsets, is_sequence, str2bool, get_alibi_slopes from eight_mile.utils import transition_mask as transition_mask_np MASK_FALSE = False logger = logging.getLogger("mead.layers") def sequence_mask(lengths: torch.Tensor, max_len: int = -1) -> torch.Tensor: """Generate a sequence mask of shape `BxT` based on the given lengths :param lengths: A `B` tensor containing the lengths of each example :param max_len: The maximum width (length) allowed in this mask (default to None) :return: A mask """ lens = lengths.cpu() if max_len < 0: max_len_v = torch.max(lens) else: max_len_v = max_len # 1 x T row = torch.arange(0, max_len_v).type_as(lens).view(1, -1) # B x 1 col = lens.view(-1, 1) # Broadcast to B x T, compares increasing number to max mask = row < col return mask def sequence_mask_mxlen(lengths: torch.Tensor, max_len: int) -> torch.Tensor: """Generate a sequence mask of shape `BxT` based on the given lengths, with a maximum value This function primarily exists to make ONNX tracing work better :param lengths: A `B` tensor containing the lengths of each example :param max_len: The maximum width (length) allowed in this mask (default to None) :return: A mask """ lens = lengths.cpu() max_len_v = max_len # 1 x T row = torch.arange(0, max_len_v).type_as(lens).view(1, -1) # B x 1 col = lens.view(-1, 1) # Broadcast to B x T, compares increasing number to max mask = row < col return mask @torch.jit.script def truncate_mask_over_time(mask: torch.Tensor, x: torch.Tensor) -> torch.Tensor: Tout = x.shape[1] mask = mask[:, :Tout] #mask = mask.narrow(1, 0, arcs_h.shape[1]) return mask def vec_log_sum_exp(vec: torch.Tensor, dim: int) -> torch.Tensor: """Vectorized 
version of log-sum-exp :param vec: Vector :param dim: What dimension to operate on :return: """ max_scores, idx = torch.max(vec, dim, keepdim=True) max_scores_broadcast = max_scores.expand_as(vec) return max_scores + torch.log(torch.sum(torch.exp(vec - max_scores_broadcast), dim, keepdim=True)) def unsort_batch(batch: torch.Tensor, perm_idx: torch.Tensor) -> torch.Tensor: """Undo the sort on a batch of tensors done for packing the data in the RNN. :param batch: The batch of data batch first `[B, ...]` :param perm_idx: The permutation index returned from the torch.sort. :returns: The batch in the original order. """ # Add ones to the shape of the perm_idx until it can broadcast to the batch perm_idx = perm_idx.to(batch.device) diff = len(batch.shape) - len(perm_idx.shape) extra_dims = [1] * diff perm_idx = perm_idx.view([-1] + extra_dims) return torch.scatter(torch.zeros_like(batch), 0, perm_idx.expand_as(batch), batch) def infer_lengths(tensor, dim=1): """Infer the lengths of an input based on the idea the Offsets.PAD was used as the padding token. :param tensor: The data to infer the length of, should be either [B, T] or [T, B] :param dim: The dimension which contains the sequential signal :returns: A Tensor of shape `[B]` that has the lengths for example item in the batch """ if len(tensor.shape) != 2: raise ValueError(f"infer_lengths only works with tensors wit two dims right now, got {len(tensor.shape)}") offsets = torch.arange(1, tensor.shape[dim] + 1, device=tensor.device, dtype=tensor.dtype).unsqueeze(1 - dim) non_pad_loc = (tensor != Offsets.PAD).to(tensor.dtype) return torch.argmax(non_pad_loc * offsets, dim=dim) + 1 def tensor_and_lengths(inputs) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: """Return either the unpacked inputs (2), or a `Tuple` of the input with None TODO: this function should probably be changed to always return the lengths second. To do this, we just need a sentinel value, e.g. <PAD> (0). 
The problem with doing this is that it might be possible to generate <PAD> in the middle of the tensor which would make that length invalid. :param inputs: Either a sequence of the `(tensor, length)` or just the `tensor` :return: A `Tuple` of `(tensor, length)` or `(tensor, None)` """ if isinstance(inputs, (list, tuple)): in_tensor, lengths = inputs else: in_tensor = inputs lengths = None return in_tensor, lengths class VariationalDropout(nn.Module): """Inverted dropout that applies the same mask at each time step.""" def __init__(self, pdrop: float = 0.5, batch_first: bool = False): """Variational Dropout :param pdrop: the percentage to drop """ super().__init__() self.pdrop = pdrop self.batch_first = batch_first def extra_repr(self): return "p=%.1f" % self.pdrop def forward(self, input: torch.Tensor) -> torch.Tensor: if not self.training: return input # Create a mask that covers a single time step if self.batch_first: dim0 = input.size(0) dim1 = 1 else: dim0 = 1 dim1 = input.size(1) mask = torch.zeros(dim0, dim1, input.size(2)).bernoulli_(1 - self.pdrop).to(input.device) mask = mask / self.pdrop # Broadcast the mask over the sequence return mask * input class SequenceLoss(nn.Module): """Computes the loss over a sequence""" def __init__(self, LossFn: nn.Module = nn.NLLLoss, avg: str = "token"): """A class that applies a Loss function to sequence via the folding trick. 
:param LossFn: A loss function to apply (defaults to `nn.NLLLoss`) :param avg: A divisor to apply, valid values are `token` and `batch` """ super().__init__() self.avg = avg if avg == "token": self.crit = LossFn(ignore_index=Offsets.PAD, reduction="mean") self._norm = self._no_norm else: self.crit = LossFn(ignore_index=Offsets.PAD, reduction="sum") self._norm = self._batch_norm def _batch_norm(self, loss, inputs): return loss / inputs.size()[0] def _no_norm(self, loss, inputs): return loss def forward(self, inputs: torch.Tensor, targets: torch.Tensor) -> torch.Tensor: """Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, .., C] The scores from the model. Batch First :param targets: torch.LongTensor, The labels. :returns: torch.FloatTensor, The loss. """ total_sz = targets.nelement() loss = self.crit(inputs.view(total_sz, -1), targets.view(total_sz)) return self._norm(loss, inputs) def extra_repr(self): return f"reduction={self.avg}" class LabelSmoothingLoss(nn.Module): def __init__(self, label_smoothing, ignore_index=0, reduction="none"): """Use Label smoothing from `Szegedy et. al., 2015`_ to temper model confidence. Implements add-gamma smoothing where the probability mass of the gold label distribution is smoothed across classes. This implementation is based on `OpenNMT-py`_ but has been adapted to not require the vocabulary size up front. .. _Szegedy et. al., 2015: https://arxiv.org/abs/1512.00567 .. 
_OpenNMY-py: https://github.com/OpenNMT/OpenNMT-py/blob/938a4f561b07f4d468647823fab761cfb51f21da/onmt/utils/loss.py#L194 """ if not (0.0 < label_smoothing <= 1.0): raise ValueError(f"`label_smoothing` must be between 0.0 and 1.0, got {label_smoothing}") super().__init__() self.ignore_index = ignore_index self.label_smoothing = label_smoothing self.confidence = 1.0 - label_smoothing self.reduction = reduction if reduction != "mean" else "batchmean" def forward(self, output: torch.Tensor, target: torch.Tensor) -> torch.Tensor: """ :param output: The model outputs, [B, V] :param target: The target labels, [B] """ B, V = output.size() smoothed = torch.full((B, V), self.label_smoothing / (V - 2)) smoothed[:, self.ignore_index] = 0 smoothed = torch.scatter(smoothed, 1, target.unsqueeze(1), self.confidence) smoothed = smoothed.masked_fill_((target == self.ignore_index).unsqueeze(1), 0) return F.kl_div(output, smoothed, reduction=self.reduction) def extra_repr(self): return f"label_smoothing={self.label_smoothing}" class MeanPool1D(nn.Module): """Do a mean pool while accounting for the length of a sequence """ def __init__(self, outsz, batch_first=True): """Set up pooling module :param outsz: The output dim, for dowstream access :param batch_first: Is this module batch first or time first? 
""" super().__init__() self.batch_first = batch_first self.reduction_dim = 1 if self.batch_first else 0 self.output_dim = outsz self.requires_length = True def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: """Apply mean pooling on the valid inputs :param inputs: A tuple of `(input, lengths)` :return: Pooled output """ tensor, lengths = tensor_and_lengths(inputs) # Regardless of whether the input is `[B, T, H]` or `[T, B, H]` the shape after # the sum is `[B, H]` so the lengths (of shape `[B]`) should be unsqueezed to # `[B, 1]` in order to broadcast return torch.sum(tensor, self.reduction_dim, keepdim=False) / torch.unsqueeze(lengths, -1).to(tensor.dtype).to( tensor.device ) def extra_repr(self): return f"batch_first={self.batch_first}" class MaxPool1D(nn.Module): """Do a max-pooling operation with or without a length given """ def __init__(self, outsz, batch_first=True): super().__init__() self.batch_first = batch_first self.reduction_dim = 1 if self.batch_first else 0 self.output_dim = outsz def forward(self, inputs: Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]) -> torch.Tensor: """If we are given a tuple as input, we will use the length, otherwise we will do an operation without masking :param inputs: either a tuple of `(input, lengths)` or a tensor `input` :return: A pooled tensor """ tensor, lengths = tensor_and_lengths(inputs) if lengths is not None: # If tensor = `[B, T, H]` # mask = `[B, T, 1]` # If tensor = `[T, B, H]` # mask = `[T, B, 1]` # So it will mask all the values in H past the right length mask = sequence_mask(lengths).to(tensor.device) mask = mask if self.batch_first else bth2tbh(mask) # Fill masked with very negative so it never gets selected tensor = tensor.masked_fill(mask.unsqueeze(-1) == MASK_FALSE, -1e4) dmax, _ = torch.max(tensor, self.reduction_dim, keepdim=False) return dmax def extra_repr(self) -> str: return f"batch_first={self.batch_first}" # Torch only added this module in 1.4.0, shim class 
GeLU(nn.Module): def __init__(self): super().__init__() def forward(self, x): return torch.nn.functional.gelu(x) #Code taken from: https://github.com/huggingface/transformers/blob/766d4bf7920213bdd8a8afb42a72719190124568/src/transformers/activations.py#L27 class Gpt2GELU(nn.Module): """ Implementation of the GELU activation function currently in Google BERT repo (identical to OpenAI GPT). Also see the Gaussian Error Linear Units paper: https://arxiv.org/abs/1606.08415 """ def forward(self, input): return 0.5 * input * (1.0 + torch.tanh(math.sqrt(2.0 / math.pi) * (input + 0.044715 * torch.pow(input, 3.0)))) def get_activation(name: str = "relu") -> nn.Module: """Get back an `nn.Module` by string name of the activation operator :param name: A string name of the operation :return: A module associated with that string """ if name is None or name == "ident": return nn.Identity() if name == "tanh": return nn.Tanh() if name == "gelu": return GeLU() if name == "hardtanh": return nn.Hardtanh() if name == "leaky_relu": return nn.LeakyReLU() if name == "prelu": return nn.PReLU() if name == "sigmoid": return nn.Sigmoid() if name == "log_sigmoid": return nn.LogSigmoid() if name == "log_softmax": return nn.LogSoftmax(dim=-1) if name == "softmax": return nn.Softmax(dim=-1) if name == "gpt2_gelu": return Gpt2GELU() return nn.ReLU() def _cat_dir(h: torch.Tensor) -> torch.Tensor: """Concat forward and backword state vectors. The shape of the hidden is `[#layers * #dirs, B, H]`. The docs say you can separate directions with `h.view(#l, #dirs, B, H)` with the forward dir being index 0 and backwards dir being 1. This means that before separating with the view the forward dir are the even indices in the first dim while the backwards dirs are the odd ones. 
Here we select the even and odd values and concatenate them :param h: The hidden shape as it comes back from PyTorch modules """ return torch.cat([h[0 : h.size(0) : 2], h[1 : h.size(0) : 2]], dim=-1) def concat_state_dirs(state): """Convert the bidirectional out of an RNN so the forward and backward values are a single vector.""" if isinstance(state, tuple): return tuple(_cat_dir(h) for h in state) return _cat_dir(state) class Conv1DSame(nn.Module): """Perform a 1D convolution with output size same as input size To make this operation work as expected, we cannot just use `padding=kernel_size//2` inside of the convolution operation. Instead, we zeropad the input using the `ConstantPad1d` module """ def __init__(self, in_channels: int, out_channels: int, kernel_size: int, bias: bool = True, groups: int = 1, unif: float = 0.0, initializer: Optional[str] = None, activation: Optional[str] = None): """Create a 1D conv to produce the same output size as input :param in_channels: The number of input feature maps :param out_channels: The number of output feature maps :param kernel_size: The kernel size :param bias: Is bias on? 
:param groups: Number of conv groups """ super().__init__() end_pad = kernel_size // 2 start_pad = end_pad - 1 if kernel_size % 2 == 0 else end_pad self.conv = nn.Sequential( nn.ConstantPad1d((start_pad, end_pad), 0.), pytorch_conv1d(in_channels, out_channels, kernel_size, unif=unif, initializer=initializer, bias=bias, groups=groups), get_activation(activation) ) def forward(self, x: torch.Tensor) -> torch.Tensor: """Do convolution1d on an input tensor, `[B, C, T]` :param x: The input tensor of shape `[B, C, T]` :return: The output tensor of shape `[B, H, T]` """ return self.conv(x) class ConvEncoder(nn.Module): """1D Convolutional layer encoder with given activation function, optional dropout This module takes in a temporal signal of either shape `[B, C, T]` or `[B, T, C]`, depending on the constructor and produces an output signal of the same orientation (`[B, H, T]` or `[B, T, H]`, respectively). We default to `[B, T, H]` orientation to make it more convenient for typical layout, but this requires transposing the last 2 dims before and after the convolution operation. """ def __init__(self, insz: int, outsz: int, filtsz: int, pdrop: float = 0.0, activation: str = "relu", bias: bool = True, groups: int = 1, hidden_last=True): """Construct the encoder with optional dropout, given activation, and orientation :param insz: The number of input feature maps :param outsz: The number of output feature maps (or hidden size) :param filtsz: The kernel size :param pdrop: The amount of dropout to apply, this defaults to 0 :param activation: The activation function by name, defaults to `relu` :param bias: Use bias? :param groups: How many conv groups. Defaults to 1 :param hidden_last: PyTorch only! If `True` the orientatiation is `[B, T, H]`, o.w. 
`[B, H, T]` expected """ super().__init__() self.output_dim = outsz conv = Conv1DSame(insz, outsz, filtsz, bias=bias, groups=groups) act = get_activation(activation) dropout = nn.Dropout(pdrop) if hidden_last: self.conv = nn.Sequential(BTH2BHT(), conv, act, dropout, BHT2BTH()) else: self.conv = nn.Sequential(conv, act, dropout) def forward(self, input: torch.Tensor) -> torch.Tensor: return self.conv(input) class ConvEncoderStack(nn.Module): """Create a stack of convolutional encoders with residual connections between, using the `ConvEncoder` underneath This creates an encoder stack of convolutions, finally returning the last temporal output. Each layer uses zero-padding which causes the output of the convolution at each layer to be the same length. As in the `ConvEncoder` we support input tensor shapes of `[B, C, T]` or `[B, T, C]` depending on the constructor initialization, and transpose underneath the input and output of the stack if the orientation is defaulted to `[B, T, C]` """ def __init__(self, insz: int, outsz: int, filtsz: int, nlayers: int = 1, pdrop: float = 0.0, activation: str = "relu", bias: bool = True, groups: int = 1, hidden_last=True): """Construct the encoder stack :param insz: The input number of feature maps :param outsz: The output number of feature maps :param filtsz: The kernel size :param nlayers: The number of layers in the stack (defaults to a single layer) :param pdrop: The amount of dropout to apply (defaults to `0`) :param activation: The activation function to use as a string, defaults to `relu` :param bias: Use bias? :param groups: How many conv groups. Defaults to 1 :param hidden_last: PyTorch only! If `True` the orientatiation is `[B, T, H]`, o.w. 
`[B, H, T]` expected """ super().__init__() if hidden_last: first_layer = nn.Sequential(BTH2BHT(), ConvEncoder(insz, outsz, filtsz, pdrop, activation, bias, groups, hidden_last=False)) else: first_layer = ConvEncoder(insz, outsz, filtsz, pdrop, activation, bias, groups, hidden_last=False) subsequent_layer = ResidualBlock(ConvEncoder(outsz, outsz, filtsz, pdrop, activation, bias, groups, hidden_last=False)) self.layers = nn.ModuleList([first_layer] + [copy.deepcopy(subsequent_layer) for _ in range(nlayers - 1)]) if hidden_last: self.layers.append(BHT2BTH()) self.output_dim = outsz def forward(self, input: torch.Tensor) -> torch.Tensor: """Apply a stack of 1D convolutions with residual connections between them :param input: A tensor of shape `[B, T, C]` or `[B, C, T]` depending on value of `hidden_last` :return: A tensor of shape `[B, T, H]` or `[B, H, T]` depending on the value of `hidden_last` """ x = input for layer in self.layers: x = layer(x) return x def bth2bht(t: torch.Tensor) -> torch.Tensor: """Transpose the 2nd and 3rd dim of a tensor""" return t.transpose(1, 2).contiguous() class BTH2BHT(nn.Module): """Utility layer to convert from `[B, T, H]` to `[B, H, T]` """ def __init__(self): super().__init__() def forward(self, t: torch.Tensor) -> torch.Tensor: return bth2bht(t) def tbh2bht(t: torch.Tensor) -> torch.Tensor: """Permute the dimensions, first goes to third, second goes to first, last moves to second""" return t.permute(1, 2, 0).contiguous() class TBH2BHT(nn.Module): """Utility layer to convert from `[T, B, H]` to `[B, H, T]` """ def __init__(self): super().__init__() def forward(self, t: torch.Tensor) -> torch.Tensor: return tbh2bht(t) def tbh2bth(t: torch.Tensor) -> torch.Tensor: """Transpose the first 2 dims""" return t.transpose(0, 1).contiguous() class TBH2BTH(nn.Module): """Utility layer to convert from `[T, B, H]` to `[B, T, H]` """ def __init__(self): super().__init__() def forward(self, t: torch.Tensor) -> torch.Tensor: return tbh2bth(t) def 
bth2tbh(t: torch.Tensor) -> torch.Tensor: """Transpose the first 2 dims""" return t.transpose(0, 1).contiguous() class BTH2TBH(nn.Module): """Utility layer to convert from `[B, T, H]` to `[T, B, H]` """ def __init__(self): super().__init__() def forward(self, t: torch.Tensor) -> torch.Tensor: return bth2tbh(t) def bht2bth(t: torch.Tensor) -> torch.Tensor: return t.transpose(1, 2).contiguous() class BHT2BTH(nn.Module): """Utility layer to convert from `[B, H, T]` to `[B, T, H]` """ def __init__(self): super().__init__() def forward(self, t: torch.Tensor) -> torch.Tensor: return bht2bth(t) class ParallelConv(nn.Module): """Layer of parallel convolutions with varying filter sizes followed by max over time pooling This module takes an input tensor of any orientation based on its constructor, and pools its output to shape `[B, H]`, where `H` is `outsz * len(filtsz)` """ def __init__(self, insz: int, outsz: int, filtsz: List[int], activation: str = "relu", input_fmt: str = "bth"): """ Constructor for a parallel convolution from any orientation tensor input :param insz: The number of input feature maps :param outsz: The number of output feature maps :param filtsz: The kernel size as a list of parallel filters to apply, e.g. `[3, 4, 5]` :param activation: An activation function by name to apply :param input_fmt: A string for the orientation. 
Valid values are `bth` or `btc` meaning hidden units last, `bht` or `bct` meaning the temporal dim last or `tbh` or `tbc` meaning the hidden units last and the temporal dim first """ super().__init__() self.requires_length = False convs = [] outsz_filts = outsz self.input_fmt = input_fmt.lower() if type(outsz) == int: outsz_filts = len(filtsz) * [outsz] self.output_dim = sum(outsz_filts) for i, fsz in enumerate(filtsz): if fsz % 2 == 0: conv = Conv1DSame(insz, outsz_filts[i], fsz) else: pad = fsz // 2 conv = nn.Conv1d(insz, outsz_filts[i], fsz, padding=pad) conv = nn.Sequential( conv, get_activation(activation) ) convs.append(conv) # Add the module so its managed correctly self.convs = nn.ModuleList(convs) def transform_input(self, t: torch.Tensor) -> torch.Tensor: if self.input_fmt == "bth" or self.input_fmt == "btc": return bth2bht(t) elif self.input_fmt == "tbh" or self.input_fmt == "tbc": return tbh2bht(t) else: return t def forward(self, inputs: torch.Tensor) -> torch.Tensor: """Transform the input to `[B, C, T]` from any orientation and perform parallel 1D convs and max over time pool :param inputs: An input tensor of any format specified in the constructor :return: A `[B, H]` tensor representing the pooled outputs """ mots = [] input_bct = self.transform_input(inputs) for conv in self.convs: # In Conv1d, data BxCxT, max over time conv_out = conv(input_bct) mot, _ = conv_out.max(2) mots.append(mot) mots = torch.cat(mots, 1) return mots # self.conv_drop(mots) class Highway(nn.Module): """Highway layer as defined in https://arxiv.org/abs/1505.00387 """ def __init__(self, input_size: int, **kwargs): """Highway layer constructor :param input_size: The input hidden size :param kwargs: """ super().__init__() self.proj = nn.Linear(input_size, input_size) self.transform = nn.Linear(input_size, input_size) self.transform.bias.data.fill_(-2.0) self.output_dim = input_size def forward(self, input: torch.Tensor) -> torch.Tensor: """Take a tensor in and produce the 
highway layer output :param input: Input tensor :return: output tensor """ proj_result = torch.relu(self.proj(input)) proj_gate = torch.sigmoid(self.transform(input)) gated = (proj_gate * proj_result) + ((1 - proj_gate) * input) return gated def pytorch_linear(in_sz: int, out_sz: int, unif: float = 0, initializer: str = None, bias: bool = True): """Utility function that wraps a linear (AKA dense) layer creation, with options for weight init and bias""" l = nn.Linear(in_sz, out_sz, bias=bias) if unif > 0: l.weight.data.uniform_(-unif, unif) elif initializer == "ortho": nn.init.orthogonal(l.weight) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(l.weight) else: nn.init.xavier_uniform_(l.weight) if bias: l.bias.data.zero_() return l class StackedLSTMCell(nn.Module): """A stacked LSTM cells applied at a timestep """ def __init__(self, num_layers: int, input_size: int, rnn_size: int, dropout: float): super().__init__() self.dropout = nn.Dropout(dropout) self.num_layers = num_layers self.layers = nn.ModuleList() for i in range(num_layers): self.layers.append(nn.LSTMCell(input_size=input_size, hidden_size=rnn_size, bias=False)) input_size = rnn_size def forward(self, input: torch.Tensor, hidden: torch.Tensor): """Apply a stack of LSTMs :param input: The input to the first LSTM `[B, H]` :param hidden: The previous `(h, c)` where `h=(h_0, h_1,..)`, `c=(c_0, c_1,..)` :return: The output and hidden `(h, c)` where `h=(h_0, h_1,..)`, `c=(c_0, c_1,..)` """ h_0, c_0 = hidden hs, cs = [], [] for i, layer in enumerate(self.layers): h_i, c_i = layer(input, (h_0[i], c_0[i])) input = h_i if i != self.num_layers - 1: input = self.dropout(input) hs.append(h_i) cs.append(c_i) hs = torch.stack(hs) cs = torch.stack(cs) return input, (hs, cs) class StackedGRUCell(nn.Module): """A stacked GRU cells applied at a timestep """ def __init__(self, num_layers: int, input_size: int, rnn_size: int, dropout: float): super().__init__() self.dropout = nn.Dropout(dropout) 
self.num_layers = num_layers self.layers = nn.ModuleList() for i in range(num_layers): self.layers.append(nn.GRUCell(input_size=input_size, hidden_size=rnn_size)) input_size = rnn_size def forward(self, input: torch.Tensor, hidden: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: """Apply a stack of GRUs :param input: The input to the first LSTM `[B, H]` :param hidden: The previous `h` where `h=(h_0, h_1,..)` :return: The output and hidden `h` where `h=(h_0, h_1,..)` """ h_0 = hidden hs = [] for i, layer in enumerate(self.layers): h_i = layer(input, (h_0[i])) input = h_i if i != self.num_layers: input = self.dropout(input) hs.append(h_i) hs = torch.stack(hs) return input, hs class Dense(nn.Module): """Dense (Linear) layer with optional activation given This module is the equivalent of the tf.keras.layer.Dense, module with optional activations applied """ def __init__( self, insz: int, outsz: int, activation: Optional[str] = None, unif: float = 0, initializer: Optional[str] = None, ): """Constructor for "dense" or "linear" layer, with optional activation applied :param insz: The number of hidden units in the input :param outsz: The number of hidden units in the output :param activation: The activation function by name, defaults to `None`, meaning no activation is applied :param unif: An optional initialization value which can set the linear weights. 
If given, biases will init to 0 :param initializer: An initialization scheme by string name: `ortho`, `kaiming` or `he`, `xavier` or `glorot` """ super().__init__() self.layer = pytorch_linear(insz, outsz, unif, initializer) self.activation = get_activation(activation) self.output_dim = outsz def forward(self, input: torch.Tensor) -> torch.Tensor: """Run a linear projection over the input, followed by an optional activation given by constructor :param input: the input tensor :return: the transformed output """ return self.activation(self.layer(input)) class WeightTieDense(nn.Module): """Do weight tying from the input parameter This module never copies the weight pointer, it lazily accesses to allow the tied variable to reset its parameters after initialization. This is helpful for cases where we have LMs and are reloading them after they have been initially created """ def __init__(self, tie: nn.Module, bias=False): super().__init__() self.tie = tie self.transform = self._get_transform(tie) if bias: bias = torch.nn.Parameter(torch.zeros(self.transform(self.weight.shape[0]))) else: bias = None self.register_parameter("bias", bias) def _get_transform(self, tie: nn.Module): emb = getattr(tie, "embeddings", None) if emb is not None: return self._identity return self._transpose @property def weight(self): emb = getattr(self.tie, "embeddings", None) if emb is not None: return getattr(emb, "weight") return getattr(self.tie, "weight") def _identity(self, x: torch.Tensor) -> torch.Tensor: return x def _transpose(self, x: torch.Tensor) -> torch.Tensor: return x.transpose(0, 1).contiguous() def forward(self, input: torch.Tensor) -> torch.Tensor: return F.linear(input, self.transform(self.weight), self.bias) class ResidualBlock(nn.Module): """Create a residual block by wrapping an layer with a residual connection""" def __init__(self, layer: Optional[nn.Module] = None, **kwargs): """Wrap an layer with a residual connection :param layer: This layer will be applied to the input 
and added to the input
        :param kwargs:
        """
        super().__init__()
        self.layer = layer
        # Only advertise an output_dim if the wrapped layer knows its own
        if self.layer is not None and hasattr(layer, "output_dim"):
            self.output_dim = layer.output_dim

    def forward(self, input: torch.Tensor) -> torch.Tensor:
        """Apply a residual block

        :param input: A tensor to use as input and to add to output
        :return: The residual connection output
        """
        return input + self.layer(input)


class SkipConnection(ResidualBlock):

    """Subclass of ResidualBlock(Dense) with an activation function given
    """

    def __init__(self, input_size: int, activation: str = "relu"):
        """Create a `SkipConnection`

        :param input_size: The input dimension size
        :param activation: A string activation name
        """
        super().__init__(None)
        # Square projection so the residual add is shape-compatible
        self.layer = Dense(input_size, input_size, activation=activation)
        self.output_dim = input_size


def rnn_cell(insz: int, hsz: int, rnntype: str, nlayers: int, dropout: float):
    """This is a wrapper function around a stacked RNN cell

    :param insz: The input dimensions
    :param hsz: The hidden dimensions
    :param rnntype: An RNN type `gru` or `lstm`
    :param nlayers: The number of layers to stack
    :param dropout: The amount of dropout
    :return: A stacked cell: `StackedGRUCell` for `gru`, otherwise `StackedLSTMCell`
    """
    if rnntype == "gru":
        rnn = StackedGRUCell(nlayers, insz, hsz, dropout)
    else:
        rnn = StackedLSTMCell(nlayers, insz, hsz, dropout)
    return rnn


def pytorch_lstm(
    insz: int,
    hsz: int,
    rnntype: str,
    nlayers: int,
    dropout: float,
    unif: float = 0,
    batch_first: bool = False,
    initializer: str = None,
) -> torch.nn.LSTM:
    """Wrapper around `torch.nn.LSTM`, mainly for weight initialization options

    :param insz: The input dimension
    :param hsz: The number of hidden units
    :param rnntype: A string description of the type of LSTM: `bi?lstm` or `lstm`
    :param nlayers: The number of layers
    :param dropout: How much dropout to apply
    :param unif: if uniform initialization, what range?
:param batch_first: Should we do the RNN batch first or time first :param initializer: An optional string representing a style of initialization `ortho`, `he`/`kaiming`, `xavier`/`glorot` :return: An LSTM """ if nlayers == 1: dropout = 0.0 ndir = 2 if rnntype.startswith("b") else 1 layer_hsz = hsz // ndir rnn = torch.nn.LSTM( insz, layer_hsz, nlayers, dropout=dropout, bidirectional=True if ndir > 1 else False, batch_first=batch_first ) # , bias=False) if initializer == "ortho": nn.init.orthogonal(rnn.weight_hh_l0) nn.init.orthogonal(rnn.weight_ih_l0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(rnn.weight_hh_l0) nn.init.kaiming_uniform(rnn.weight_ih_l0) elif unif > 0: for weight in rnn.parameters(): weight.data.uniform_(-unif, unif) else: nn.init.xavier_uniform_(rnn.weight_hh_l0) nn.init.xavier_uniform_(rnn.weight_ih_l0) return rnn class LSTMEncoderBase(nn.Module): """The LSTM encoder is a base for a set of encoders producing various outputs. All LSTM encoders inheriting this class will trim the input to the max length given in the batch. For example, if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will be length `S` (or more precisely, `[B, S, H]`) *PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and this is the PyTorch default. There is an extra parameter in all of these models called `batch_first` which controls this. Currently, the default is time first (`batch_first=False`), which differs from TensorFlow. To match the TF impl, set `batch_first=True`. *PyTorch Note*: Most `LSTMEncoder` variants just define the `forward`. This module cannot provide the same utility as the TensorFlow `LSTMEncoder` base right now, because because the JIT isnt handling subclassing of forward properly. 
""" def __init__( self, insz: int, hsz: int, nlayers: int, pdrop: float = 0.0, requires_length: bool = True, batch_first: bool = False, unif: float = 0, initializer: str = None, **kwargs, ): """Produce a stack of LSTMs with dropout performed on all but the last layer. :param insz: The size of the input :param hsz: The number of hidden units per LSTM :param nlayers: The number of layers of LSTMs to stack :param pdrop: The probability of dropping a unit value during dropout, defaults to 0 :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`) :param batch_first: PyTorch only! Should we do batch first input or time-first input? Defaults to `False` (differs from TF!) :param unif: PyTorch only! Initialization parameters for RNN :param initializer: PyTorch only! A string describing optional initialization type for RNN """ super().__init__() self.requires_length = requires_length self.batch_first = batch_first self.nlayers = nlayers if nlayers == 1: pdrop = 0.0 self.rnn = torch.nn.LSTM(insz, hsz, nlayers, dropout=pdrop, bidirectional=False, batch_first=batch_first) if initializer == "ortho": nn.init.orthogonal(self.rnn.weight_hh_l0) nn.init.orthogonal(self.rnn.weight_ih_l0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(self.rnn.weight_hh_l0) nn.init.kaiming_uniform(self.rnn.weight_ih_l0) elif unif > 0: for weight in self.rnn.parameters(): weight.data.uniform_(-unif, unif) else: nn.init.xavier_uniform_(self.rnn.weight_hh_l0) nn.init.xavier_uniform_(self.rnn.weight_ih_l0) self.output_dim = hsz # def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]: # tbc, lengths = tensor_and_lengths(inputs) # packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths, batch_first=self.batch_first) # output, hidden = self.rnn(packed) # output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first) # return self.output_fn(output, hidden) # 
def output_fn(self, output, state):
    #     return output, self.extract_top_state(state)

    def extract_top_state(self, state: Tuple[torch.Tensor, torch.Tensor]) -> List[torch.Tensor]:
        """Get a view of the top state of shape `[B, H]`

        :param state: The LSTM state, an `(h, c)` pair
        :return: A list of the top-layer `h` and `c`, each viewed as `[B, H]`
        """
        # Select the topmost state with -1 and the only direction is forward (select with 0)
        top = []
        for s in state:
            top.append(s.view(self.nlayers, 1, -1, self.output_dim)[-1, 0])

        return top


class LSTMEncoderSequence(LSTMEncoderBase):

    """LSTM encoder to produce the transduced output sequence.

    Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of
    shape `[B, S, H]` where `S = max(lengths)`.  The lengths of the output sequence may differ from the input
    sequence if the `max(lengths)` given is shorter than `T` during execution.

    *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`,
    and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same
    orientation.
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """Take in a tuple of `(sequence, lengths)` and produce and output tensor of the last layer of LSTMs

        The value `S` here is defined as `max(lengths)`, `S <= T`

        :param inputs: sequence of shapes `[B, T, C]` or `[T, B, C]` and a lengths of shape `[B]`
        :return: A tensor of shape `[B, S, H]` or `[S, B, H]` depending on setting of `batch_first`
        """
        tbc, lengths = inputs
        # Packing trims the padded output to `S = max(lengths)`; lengths must be on CPU for packing
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output


class LSTMEncoderWithState(nn.Module):

    """LSTM encoder producing the hidden state and the output, where the input doesnt require any padding

    PyTorch note: This type of encoder doesnt inherit the `LSTMEncoderWithState` base
    """

    def __init__(
        self,
        insz: int,
        hsz: int,
        nlayers: int,
        pdrop: float = 0.0,
        batch_first: bool = False,
        unif: float = 0,
        initializer: str = None,
        **kwargs,
    ):
        """
        :param insz: The size of the input
        :param hsz: The number of hidden units per LSTM
        :param nlayers: The number of layers of LSTMs to stack
        :param pdrop: The probability of dropping a unit value during dropout, defaults to 0
        :param batch_first: PyTorch only! do batch first or time-first input? Defaults to `False` (differs from TF!)
        :param unif: PyTorch only! Initialization parameters for RNN
        :param initializer: PyTorch only!
A string describing optional initialization type for RNN """ super().__init__() self.requires_length = False self.requires_state = True self.batch_first = batch_first self.nlayers = nlayers if nlayers == 1: pdrop = 0.0 self.rnn = torch.nn.LSTM(insz, hsz, nlayers, dropout=pdrop, bidirectional=False, batch_first=batch_first) if initializer == "ortho": nn.init.orthogonal(self.rnn.weight_hh_l0) nn.init.orthogonal(self.rnn.weight_ih_l0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(self.rnn.weight_hh_l0) nn.init.kaiming_uniform(self.rnn.weight_ih_l0) elif unif > 0: for weight in self.rnn.parameters(): weight.data.uniform_(-unif, unif) else: nn.init.xavier_uniform_(self.rnn.weight_hh_l0) nn.init.xavier_uniform_(self.rnn.weight_ih_l0) self.output_dim = hsz def forward(self, input_and_prev_h: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]: """ :param input_and_prev_h: The input at this timestep and the previous hidden unit or `None` :return: Raw `torch.nn.LSTM` output """ inputs, hidden = input_and_prev_h output, hidden = self.rnn(inputs, hidden) return output, hidden ##concat_state_dirs(hidden) class LSTMEncoderAll(LSTMEncoderBase): """LSTM encoder that passes along the full output and hidden states for each layer Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence, and a tuple of hidden vector `[L, B, H]` and context vector `[L, B, H]`, respectively *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. 
Also note that in PyTorch, this defaults to `True`
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor `[B, S, H]` or `[B, H, S]` , and tuple of hidden `[L, B, H]` and context `[L, B, H]`
        """
        tbc, lengths = inputs
        # lengths must live on CPU for `pack_padded_sequence`
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output, hidden


class LSTMEncoderHidden(LSTMEncoderBase):

    """LSTM encoder that returns the top hidden state


    Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and
    returns a hidden unit tensor of shape `[B, H]`

    *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification
    of `batch_first`. Also note that in PyTorch, this defaults to `True`
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state
        """
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        # Keep only the hidden (`h`) half of the `(h, c)` top-layer state
        return self.extract_top_state(hidden)[0]


# TODO: this module only exists in pytorch. Do we eliminate it or put it in both?
class LSTMEncoderSequenceHiddenContext(LSTMEncoderBase):
    # Produces both the full output sequence and the top-layer `(h, c)` state views
    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output, self.extract_top_state(hidden)


class BiLSTMEncoderBase(nn.Module):

    """BiLSTM encoder base for a set of encoders producing various outputs.

    All BiLSTM encoders inheriting this class will trim the input to the max length given in the batch.  For example,
    if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will
    be length `S` (or more precisely, `[B, S, H]`).  Because its bidirectional, half of the hidden units given in the
    constructor will be applied to the forward direction and half to the backward direction, and these will get
    concatenated.

    *PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and this
    is the PyTorch default.  There is an extra parameter in all of these models called `batch_first` which controls this.
    Currently, the default is time first (`batch_first=False`), which differs from TensorFlow.  To match the TF impl,
    set `batch_first=True`.

    *PyTorch Note*: Most `BiLSTMEncoder` variants just define the `forward`.  This module cannot provide the same utility as
    the TensorFlow `BiLSTMEncoder` base right now, because because the JIT isnt handling subclassing of forward properly.

    """

    def __init__(
        self,
        insz: int,
        hsz: int,
        nlayers: int,
        pdrop: float = 0.0,
        requires_length: bool = True,
        batch_first: bool = False,
        unif: float = 0,
        initializer: str = None,
        **kwargs,
    ):
        """Produce a stack of LSTMs with dropout performed on all but the last layer.
:param insz: The size of the input :param hsz: The number of hidden units per BiLSTM (`hsz//2` used for each direction and concatenated) :param nlayers: The number of layers of BiLSTMs to stack :param pdrop: The probability of dropping a unit value during dropout, defaults to 0 :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`) :param batch_first: Should we do batch first input or time-first input? Defaults to `False` (differs from TF!) :param unif: PyTorch only! Initialization parameters for RNN :param initializer: PyTorch only! A string describing optional initialization type for RNN """ super().__init__() self.requires_length = requires_length self.batch_first = batch_first self.nlayers = nlayers if nlayers == 1: pdrop = 0.0 self.rnn = torch.nn.LSTM(insz, hsz // 2, nlayers, dropout=pdrop, bidirectional=True, batch_first=batch_first) if initializer == "ortho": nn.init.orthogonal(self.rnn.weight_hh_l0) nn.init.orthogonal(self.rnn.weight_ih_l0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(self.rnn.weight_hh_l0) nn.init.kaiming_uniform(self.rnn.weight_ih_l0) elif unif > 0: for weight in self.rnn.parameters(): weight.data.uniform_(-unif, unif) else: nn.init.xavier_uniform_(self.rnn.weight_hh_l0) nn.init.xavier_uniform_(self.rnn.weight_ih_l0) self.output_dim = hsz def extract_top_state(self, state): # Select the topmost state with -1 and the only direction is forward (select with 0) return tuple(s.view(self.nlayers, 1, -1, self.output_dim)[-1, 0] for s in state) # TODO: this module only exists in pytorch. Do we eliminate it or put it in both? 
class BiLSTMEncoderSequenceHiddenContext(BiLSTMEncoderBase):
    # Produces the full output sequence plus top-layer state views of the direction-concatenated state
    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output, self.extract_top_state(concat_state_dirs(hidden))


class BiLSTMEncoderAll(BiLSTMEncoderBase):
    """BiLSTM encoder that passes along the full output and hidden states for each layer

    Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]`

    This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence,
    and a tuple of hidden vector `[L, B, H]` and context vector `[L, B, H]`, respectively

    *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification
    of `batch_first`. Also note that in PyTorch, this defaults to `True`
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor `[B, S, H]` or `[B, H, S]` , and tuple of hidden `[L, B, H]` and context `[L, B, H]`
        """
        tensor, lengths = inputs
        # lengths must live on CPU for `pack_padded_sequence`
        packed = torch.nn.utils.rnn.pack_padded_sequence(tensor, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        # Fold the forward/backward direction states together so hidden is `[L, B, H]`
        return output, concat_state_dirs(hidden)


class BiLSTMEncoderSequence(BiLSTMEncoderBase):

    """BiLSTM encoder to produce the transduced output sequence.

    Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of
    shape `[B, S, H]` where `S = max(lengths)`.  The lengths of the output sequence may differ from the input
    sequence if the `max(lengths)` given is shorter than `T` during execution.

    *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`,
    and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same
    orientation.
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """Take in a tuple of `(sequence, lengths)` and produce and output tensor of the last layer of LSTMs

        The value `S` here is defined as `max(lengths)`, `S <= T`

        :param inputs: sequence of shapes `[B, T, C]` or `[T, B, C]` and a lengths of shape `[B]`
        :return: A tensor of shape `[B, S, H]` or `[S, B, H]` depending on setting of `batch_first`
        """
        tensor, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tensor, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output


class BiLSTMEncoderHidden(BiLSTMEncoderBase):

    """BiLSTM encoder that returns the top hidden state


    Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and
    returns a hidden unit tensor of shape `[B, H]`

    *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification
    of `batch_first`. Also note that in PyTorch, this defaults to `True`
    """

    def forward(self, inputs):
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state
        """
        tensor, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tensor, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        # Keep only the hidden (`h`) half of the direction-concatenated top state
        return self.extract_top_state(concat_state_dirs(hidden))[0]


# TODO: Add this to TF or remove
class BiLSTMEncoderHiddenContext(BiLSTMEncoderBase):
    # Returns top-layer `(h, c)` views of the direction-concatenated state
    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return self.extract_top_state(concat_state_dirs(hidden))


class GRUEncoderBase(nn.Module):

    """The GRU encoder is a base for a set of encoders producing various outputs.

    All GRU encoders inheriting this class will trim the input to the max length given in the batch.  For example,
    if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will
    be length `S` (or more precisely, `[B, S, H]`)

    *PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and this
    is the PyTorch default.  There is an extra parameter in all of these models called `batch_first` which controls this.
    Currently, the default is time first (`batch_first=False`), which differs from TensorFlow.  To match the TF impl,
    set `batch_first=True`.

    *PyTorch Note*: Most `GRUEncoder` variants just define the `forward`.
This module cannot provide the same utility as
    the TensorFlow `GRUEncoder` base right now, because the JIT isnt handling subclassing of forward properly.

    """

    def __init__(
        self,
        insz: int,
        hsz: int,
        nlayers: int,
        pdrop: float = 0.0,
        requires_length: bool = True,
        batch_first: bool = False,
        unif: float = 0,
        initializer: str = None,
        **kwargs,
    ):
        """Produce a stack of GRUs with dropout performed on all but the last layer.

        :param insz: The size of the input
        :param hsz: The number of hidden units per GRU
        :param nlayers: The number of layers of GRUs to stack
        :param pdrop: The probability of dropping a unit value during dropout, defaults to 0
        :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`)
        :param batch_first: PyTorch only! Should we do batch first input or time-first input? Defaults to `False` (differs from TF!)
        :param unif: PyTorch only! Initialization parameters for RNN
        :param initializer: PyTorch only! A string describing optional initialization type for RNN
        """
        super().__init__()
        self.requires_length = requires_length
        self.batch_first = batch_first
        self.nlayers = nlayers
        if nlayers == 1:
            # No inter-layer dropout is possible with a single layer; avoids the torch warning
            pdrop = 0.0
        self.rnn = torch.nn.GRU(insz, hsz, nlayers, dropout=pdrop, bidirectional=False, batch_first=batch_first)
        if initializer == "ortho":
            nn.init.orthogonal_(self.rnn.weight_ih_l0)
            nn.init.orthogonal_(self.rnn.weight_hh_l0)
        elif initializer == "he" or initializer == "kaiming":
            nn.init.kaiming_uniform_(self.rnn.weight_ih_l0)
            nn.init.kaiming_uniform_(self.rnn.weight_hh_l0)
        elif unif > 0:
            for weight in self.rnn.parameters():
                weight.data.uniform_(-unif, unif)
        else:
            nn.init.xavier_uniform_(self.rnn.weight_ih_l0)
            nn.init.xavier_uniform_(self.rnn.weight_hh_l0)
        self.output_dim = hsz

    def extract_top_state(self, state: torch.Tensor) -> torch.Tensor:
        # GRU state is a single tensor `[L, B, H]`; the top layer is the last entry
        return state[-1]


class GRUEncoderSequence(GRUEncoderBase):

    """GRU encoder to produce the transduced output sequence.

    Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of
    shape `[B, S, H]` where `S = max(lengths)`.  The lengths of the output sequence may differ from the input
    sequence if the `max(lengths)` given is shorter than `T` during execution.

    *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`,
    and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same
    orientation.
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """Take in a tuple of the sequence tensor `[T, B, H]` or `[B, T, H]` and its length, produce output sequence

        :param inputs: A tuple of the sequence tensor and its length
        :return: A sequence tensor of shape `[T, B, H]` or `[B, T, H]`
        """
        tbc, lengths = inputs
        # lengths must live on CPU for `pack_padded_sequence`
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output


class GRUEncoderAll(GRUEncoderBase):
    """GRU encoder that passes along the full output and hidden states for each layer

    Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]`

    This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence,
    and a hidden vector `[L, B, H]`

    *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification
    of `batch_first`. Also note that in PyTorch, this defaults to `True`
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor `[B, S, H]` or `[B, H, S]` , and a hidden tensor `[L, B, H]`
        """
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output, hidden


class GRUEncoderHidden(GRUEncoderBase):

    """GRU encoder that returns the top hidden state


    Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and
    returns a hidden unit tensor of shape `[B, H]`

    *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification
    of `batch_first`. Also note that in PyTorch, this defaults to `True`
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state
        """
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return self.extract_top_state(hidden)


class BiGRUEncoderBase(nn.Module):

    """BiGRU encoder base for a set of encoders producing various outputs.

    All BiGRU encoders inheriting this class will trim the input to the max length given in the batch.  For example,
    if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will
    be length `S` (or more precisely, `[B, S, H]`).  Because its bidirectional, half of the hidden units given in the
    constructor will be applied to the forward direction and half to the backward direction, and these will get
    concatenated.

    *PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and this
    is the PyTorch default.  There is an extra parameter in all of these models called `batch_first` which controls this.
    Currently, the default is time first (`batch_first=False`), which differs from TensorFlow.  To match the TF impl,
    set `batch_first=True`.

    *PyTorch Note*: Most `BiGRUEncoder` variants just define the `forward`.  This module cannot provide the same utility as
    the TensorFlow `BiGRUEncoder` base right now, because because the JIT isnt handling subclassing of forward properly.

    """

    def __init__(
        self,
        insz: int,
        hsz: int,
        nlayers: int,
        pdrop: float = 0.0,
        requires_length: bool = True,
        batch_first: bool = False,
        unif: float = 0,
        initializer: str = None,
        **kwargs,
    ):
        """Produce a stack of GRUs with dropout performed on all but the last layer.

        :param insz: The size of the input
        :param hsz: The number of hidden units per BiGRU (`hsz//2` used for each direction and concatenated)
        :param nlayers: The number of layers of BiGRUs to stack
        :param pdrop: The probability of dropping a unit value during dropout, defaults to 0
        :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`)
        :param batch_first: Should we do batch first input or time-first input? Defaults to `False` (differs from TF!)
        :param unif: PyTorch only! Initialization parameters for RNN
        :param initializer: PyTorch only!
A string describing optional initialization type for RNN """ super().__init__() self.requires_length = requires_length self.batch_first = batch_first self.nlayers = nlayers if nlayers == 1: pdrop = 0.0 self.rnn = torch.nn.GRU(insz, hsz // 2, nlayers, dropout=pdrop, bidirectional=True, batch_first=batch_first) if initializer == "ortho": nn.init.orthogonal(self.rnn.weight_hh_l0) nn.init.orthogonal(self.rnn.weight_ih_l0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(self.rnn.weight_hh_l0) nn.init.kaiming_uniform(self.rnn.weight_ih_l0) elif unif > 0: for weight in self.rnn.parameters(): weight.data.uniform_(-unif, unif) else: nn.init.xavier_uniform_(self.rnn.weight_hh_l0) nn.init.xavier_uniform_(self.rnn.weight_ih_l0) self.output_dim = hsz def extract_top_state(self, state: torch.Tensor) -> torch.Tensor: # Select the topmost state with -1 and the only direction is forward (select with 0) return state[-1] # TODO: normalize across backends or remove class BiGRUEncoderSequenceHiddenContext(BiGRUEncoderBase): def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]: tbc, lengths = inputs packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first) output, hidden = self.rnn(packed) output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first) return output, self.extract_top_state(_cat_dir(hidden)) class BiGRUEncoderAll(BiGRUEncoderBase): """BiGRU encoder that passes along the full output and hidden states for each layer Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence, and a hidden vector `[L, B, H]` *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. 
Also note that in PyTorch, this defaults to `True`
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor `[B, S, H]` or `[B, H, S]` , and a hidden vector `[L, B, H]`
        """
        tbc, lengths = inputs
        # lengths must live on CPU for `pack_padded_sequence`
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        # Fold the forward/backward direction states together so hidden is `[L, B, H]`
        return output, _cat_dir(hidden)


class BiGRUEncoderSequence(BiGRUEncoderBase):

    """BiGRU encoder to produce the transduced output sequence.

    Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of
    shape `[B, S, H]` where `S = max(lengths)`.  The lengths of the output sequence may differ from the input
    sequence if the `max(lengths)` given is shorter than `T` during execution.

    *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`,
    and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same
    orientation.
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """Take in a tuple of `(sequence, lengths)` and produce and output tensor of the last layer of GRUs

        The value `S` here is defined as `max(lengths)`, `S <= T`

        :param inputs: sequence of shapes `[B, T, C]` or `[T, B, C]` and a lengths of shape `[B]`
        :return: A tensor of shape `[B, S, H]` or `[S, B, H]` depending on setting of `batch_first`
        """
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output


class BiGRUEncoderHidden(BiGRUEncoderBase):

    """GRU encoder that returns the top hidden state


    Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and
    returns a hidden unit tensor of shape `[B, H]`

    *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification
    of `batch_first`. Also note that in PyTorch, this defaults to `True`
    """

    def forward(self, inputs):
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state
        """
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return self.extract_top_state(_cat_dir(hidden))


class Reduction(nn.Module):
    # Abstract base for combining a list of feature tensors into a single tensor

    def __init__(self):
        super().__init__()

    def forward(self, inputs: List[torch.Tensor]) -> torch.Tensor:
        # Abstract: subclasses implement the actual reduction
        pass

    def set_output_dim(self, output_dims: List[int]):
        # Abstract: subclasses compute `self.output_dim` from the per-input dims
        pass


class ConcatReduction(Reduction):
    # Concatenate the inputs along `axis` (default: the last, feature, axis)
    def __init__(self, output_dims: List[int], axis=-1, **kwargs):
        super().__init__()
        self.axis = axis
        self.set_output_dim(output_dims)

    def set_output_dim(self, output_dims: List[int]):
        self.output_dim = sum(output_dims)

    def forward(self, inputs: List[torch.Tensor]) -> torch.Tensor:
        return torch.cat(inputs, self.axis)


class ConcatSubtractReduction(Reduction):

    """This reduction assumes paired input and subtracts the two to get a distance

    It is useful for training sentence encoders and is used, for example, in SentenceBERT
    For this to work we assume that the inputs are paired, and subtract them
    """

    def __init__(self, output_dims: List[int], axis=-1, **kwargs):
        super().__init__()
        self.axis = axis
        self.set_output_dim(output_dims)

    def set_output_dim(self, output_dims: List[int]):
        # Output is `[u, v, |u - v|]`, hence 3x a single input's dim
        self.output_dim = 3 * output_dims[0]

    def forward(self, inputs: List[torch.Tensor]) -> torch.Tensor:
        sub = torch.abs(inputs[0] - inputs[1])
        return torch.cat([inputs[0], inputs[1], sub], self.axis)


class SumReduction(Reduction):
    # Element-wise sum of the inputs (assumes all inputs share the first dim size)
    def __init__(self, output_dims: List[int], **kwargs):
        super().__init__()
        self.set_output_dim(output_dims)

    def set_output_dim(self, output_dims: List[int]):
        # We could actually project if we needed, or at least should validate
        self.output_dim = output_dims[0]

    def forward(self, inputs: List[torch.Tensor]) -> torch.Tensor:
        return sum(inputs)


class SumLayerNormReduction(Reduction):
    # Element-wise sum of the inputs followed by a LayerNorm over the feature dim
    def __init__(self, output_dims: List[int], layer_norm_eps: float = 1.0e-12, **kwargs):
        super().__init__()
        self.set_output_dim(output_dims)
        self.ln = nn.LayerNorm(self.output_dim, eps=layer_norm_eps)

    def set_output_dim(self, output_dims: List[int]):
        self.output_dim = output_dims[0]

    def forward(self, inputs: List[torch.Tensor]) -> torch.Tensor:
        output = sum(inputs)
        return self.ln(output)


class EmbeddingsStack(nn.Module):
    def __init__(
        self,
        embeddings_dict: Dict[str, nn.Embedding],
        dropout_rate: float = 0.0,
        requires_length: bool = False,
        reduction: Optional[Union[str, nn.Module]] = 'concat',
        **kwargs,
    ):
        """Takes in a dictionary where the keys are the input tensor names, and the values are the embeddings

        :param embeddings_dict: dictionary of each feature embedding
        :param dropout_rate: The dropout rate (0.0 means no dropout, 1.0 means complete)
        """
        super().__init__()

        self._keys: List[str] = []
        embeddings_list = []
        output_dims = []
        for k, embedding in embeddings_dict.items():
            embeddings_list.append(embedding)
            self._keys.append(k)
            output_dims += [embedding.get_dsz()]

        self.embeddings: nn.ModuleList = nn.ModuleList(embeddings_list)
        # TODO: should we make a registry of options?
if isinstance(reduction, str): if reduction == 'sum': self.reduction = SumReduction(output_dims) elif reduction == 'sum-layer-norm': self.reduction = SumLayerNormReduction(output_dims, layer_norm_eps=kwargs.get('layer_norm_eps', 1.0e-12)) elif reduction == 'concat-subtract': self.reduction = ConcatSubtractReduction(output_dims) else: self.reduction = ConcatReduction(output_dims) else: self.reduction = reduction self.reduction.set_output_dim(output_dims) self.dsz = self.reduction.output_dim self.dropout = nn.Dropout(dropout_rate) self.requires_length = requires_length def __getitem__(self, item: str) -> nn.Module: idx = self._keys.index(item) if idx < 0: raise Exception(f"Invalid item ({item})") return self.embeddings[idx] def forward(self, inputs: Dict[str, torch.Tensor]) -> torch.Tensor: """This method performs "embedding" of the inputs. The base method here then concatenates along depth dimension to form word embeddings :return: A 3-d vector where the last dimension is the concatenated dimensions of all embeddings """ all_embeddings_out = [] i = 0 for embedding in self.embeddings: k = self._keys[i] x = inputs[k] # Its a hair faster to do this than using isinstance if x.__class__ == tuple: embeddings_out = embedding(*x) else: embeddings_out = embedding(x) all_embeddings_out.append(embeddings_out) i += 1 word_embeddings = self.reduction(all_embeddings_out) return self.dropout(word_embeddings) def keys(self): return self._keys @property def output_dim(self): return self.dsz def items(self): for k, v in zip(self.keys(), self.embeddings): yield k, v class DenseStack(nn.Module): """A stack of one or more hidden layers """ def __init__( self, insz: int, hsz: Union[int, List[int]], activation: Union[str, List[str]] = "relu", pdrop_value: float = 0.5, init=None, skip_connect=False, layer_norm=False, **kwargs, ): """Stack 1 or more hidden layers, optionally (forming an MLP) :param insz: The number of input units :param hsz: The number of hidden units :param activation: The 
name of the activation function to use :param pdrop_value: The dropout probability :param init: The initializer :param skip_connect: whether use skip connection when insz is equal to outsz for a layer :param layer_norm: whether use layer norm in each layer """ super().__init__() hszs = listify(hsz) self.output_dim = hsz[-1] activations = listify(activation) if len(activations) == 1: activations = activations * len(hszs) if len(activations) != len(hszs): raise ValueError("Number of activations must match number of hidden sizes in a stack!") current = insz layer_stack = [] if layer_norm: layer_norm_eps = kwargs.get('layer_norm_eps', 1e-6) for hsz, activation in zip(hszs, activations): if skip_connect and current == hsz: layer = SkipConnection(current, activation) else: layer = Dense(current, hsz, activation) if layer_norm: layer = nn.Sequential(layer, nn.LayerNorm(hsz, eps=layer_norm_eps)) layer_stack.append(WithDropout(layer, pdrop_value)) current = hsz self.layer_stack = nn.Sequential(*layer_stack) self.requires_length = False def forward(self, inputs: torch.Tensor) -> torch.Tensor: """Stack 1 or more hidden layers, optionally (forming an MLP) :param inputs: The fixed representation of the model :Keyword Arguments: * *hsz* -- (``int``) The number of hidden units (defaults to `100`) :return: The final layer """ return self.layer_stack(inputs) class VectorSequenceAttention(nn.Module): def __init__(self, hsz: int): super().__init__() self.hsz = hsz self.W_c = nn.Linear(2 * self.hsz, hsz, bias=False) def forward(self, query_t, keys_bth, values_bth, keys_mask=None): # Output(t) = B x H x 1 # Keys = B x T x H # a = B x T x 1 a = self._attention(query_t, keys_bth, keys_mask) attended = self._update(a, query_t, values_bth) return attended def _attention(self, query_t, keys_bth, keys_mask): pass def _update(self, a, query_t, values_bth): # a = B x T # Want to apply over context, scaled by a # (B x 1 x T) (B x T x H) = (B x 1 x H) a = a.view(a.size(0), 1, a.size(1)) c_t = 
torch.bmm(a, values_bth).squeeze(1) attended = torch.cat([c_t, query_t], -1) attended = torch.tanh(self.W_c(attended)) return attended def dot_product_attention_weights(query_t: torch.Tensor, keys_bth: torch.Tensor, keys_mask: torch.Tensor) -> torch.Tensor: a = keys_bth @ query_t.unsqueeze(2) a = a.squeeze(2).masked_fill(keys_mask == MASK_FALSE, -1e9) a = F.softmax(a, dim=-1) return a def dot_product_attention_weights_lengths(query_t: torch.Tensor, keys_bth: torch.Tensor, keys_lengths: torch.Tensor) -> torch.Tensor: mask = sequence_mask(keys_lengths, keys_bth.shape[1]).to(keys_bth.device) return dot_product_attention_weights(query_t, keys_bth, mask) class LuongDotProductAttention(VectorSequenceAttention): def __init__(self, hsz): super().__init__(hsz) def _attention(self, query_t, keys_bth, keys_mask): return dot_product_attention_weights(query_t, keys_bth, keys_mask) class ScaledDotProductAttention(VectorSequenceAttention): def __init__(self, hsz): super().__init__(hsz) def _attention(self, query_t, keys_bth, keys_mask): a = (keys_bth @ query_t.unsqueeze(2)) / math.sqrt(self.hsz) a = a.squeeze(2).masked_fill(keys_mask == MASK_FALSE, -1e9) a = F.softmax(a, dim=-1) return a class LuongGeneralAttention(VectorSequenceAttention): def __init__(self, hsz): super().__init__(hsz) self.W_a = nn.Linear(self.hsz, self.hsz, bias=False) def _attention(self, query_t, keys_bth, keys_mask): a = keys_bth @ self.W_a(query_t).unsqueeze(2) a = a.squeeze(2).masked_fill(keys_mask == MASK_FALSE, -1e9) a = F.softmax(a, dim=-1) return a class BahdanauAttention(VectorSequenceAttention): def __init__(self, hsz): super().__init__(hsz) self.hsz = hsz self.W_a = nn.Linear(self.hsz, self.hsz, bias=False) self.E_a = nn.Linear(self.hsz, self.hsz, bias=False) self.v = nn.Linear(self.hsz, 1, bias=False) def _attention(self, query_t, keys_bth, keys_mask): B, T, H = keys_bth.shape q = self.W_a(query_t.view(-1, self.hsz)).view(B, 1, H) u = self.E_a(keys_bth).view(B, T, H) z = torch.tanh(q + u) a = 
self.v(z.view(-1, self.hsz)).view(B, T) a = a.masked_fill(keys_mask == MASK_FALSE, -1e9) a = F.softmax(a, dim=-1) return a def _update(self, a, query_t, values_bth): query_t = query_t.view(-1, self.hsz) # a = B x T # Want to apply over context, scaled by a # (B x 1 x T) (B x T x H) = (B x 1 x H) -> (B x H) a = a.view(a.size(0), 1, a.size(1)) c_t = (a @ values_bth).squeeze(1) # (B x 2H) attended = torch.cat([c_t, query_t], -1) attended = self.W_c(attended) return attended class FineTuneModel(nn.Module): def __init__(self, nc, embeddings, stack_model=None): super().__init__() if isinstance(embeddings, dict): self.finetuned = EmbeddingsStack(embeddings) else: self.finetuned = embeddings self.stack_model = stack_model output_dim = self.finetuned.output_dim if stack_model is None else stack_model.output_dim self.output_layer = Dense(output_dim, nc, activation="log_softmax") def forward(self, inputs): base_layers = self.finetuned(inputs) stacked = self.stack_model(base_layers) if self.stack_model is not None else base_layers return self.output_layer(stacked) class CompositePooling(nn.Module): """Composite pooling allows for multiple sub-modules during pooling to be used in parallel """ def __init__(self, models): """ Note, this currently requires that each submodel is an eight_mile model with an `output_dim` attr """ super().__init__() self.models = nn.ModuleList(models) self.output_dim = sum(m.output_dim for m in self.models) self.requires_length = any(getattr(m, "requires_length", False) for m in self.models) def forward(self, inputs): inputs, lengths = tensor_and_lengths(inputs) pooled = [] for sub_model in self.models: if getattr(sub_model, "requires_length", False): pooled.append(sub_model((inputs, lengths))) else: pooled.append(sub_model(inputs)) return torch.cat(pooled, -1) class EmbedPoolStackModel(nn.Module): """This provides an idiom for classification consisting of multiple phases In the first phase, we embed the input tensors, and subsequently pool them to a 
fixed width representation. Finally, we allow multiple hidden "stacking" layers, ultimately ending in a projection to the output space """ def __init__( self, nc: int, embeddings: nn.Module, pool_model: nn.Module, stack_model: Optional[nn.Module] = None, output_model: Optional[nn.Module] = None, ): super().__init__() self.embed_model = embeddings self.pool_model = pool_model self.stack_model = stack_model if stack_model else nn.Identity() output_dim = self.pool_model.output_dim if stack_model is None else stack_model.output_dim self.output_layer = Dense(output_dim, nc, activation="log_softmax") if output_model is None else output_model def forward(self, inputs: Dict[str, torch.Tensor]): lengths = inputs["lengths"] embedded = self.embed_model(inputs) embedded = (embedded, lengths) pooled = self.pool_model(embedded) stacked = self.stack_model(pooled) return self.output_layer(stacked) class PassThru(nn.Module): def __init__(self, input_dim): super().__init__() self.output_dim = input_dim def forward(self, inputs: torch.Tensor) -> torch.Tensor: return inputs class WithoutLength(nn.Module): """Wrapper layer to remove lengths from the input """ def __init__(self, layer: nn.Module): super().__init__() self.layer = layer self.output_dim = self.layer.output_dim if hasattr(self.layer, "output_dim") else 0 def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: return self.layer(inputs[0]) class WithDropout(nn.Module): """Wrapper for any layer that surrounds it with dropout""" def __init__(self, layer: nn.Module, pdrop: float = 0.5, variational=False, batch_first=False): """Create a dropout wrapper around the given layer :param layer: Some sort of layer :param pdrop: A dropout value """ super().__init__() self.layer = layer self.dropout = VariationalDropout(pdrop, batch_first=batch_first) if variational else nn.Dropout(pdrop) self.output_dim = self.layer.output_dim if hasattr(self.layer, "output_dim") else 0 def forward(self, inputs: torch.Tensor) -> 
torch.Tensor: """Apply the layer followed by dropout :param inputs: input tensor :return: output transformed by the held layer and subsequent dropout """ return self.dropout(self.layer(inputs)) class WithDropoutOnFirst(nn.Module): """Wrapper for any layer that surrounds it with dropout This exists primarily for the LSTMEncoderWithState to allow dropout on the output while passing back the hidden state """ def __init__(self, layer: nn.Module, pdrop: float = 0.5, variational=False): """Create a dropout wrapper around the given layer :param layer: Some sort of layer :param pdrop: A dropout value """ super().__init__() self.layer = layer self.dropout = VariationalDropout(pdrop) if variational else nn.Dropout(pdrop) self.output_dim = self.layer.output_dim if hasattr(self.layer, "output_dim") else 0 def forward(self, inputs: Tuple[torch.Tensor]) -> torch.Tensor: """Apply the layer followed by dropout :param inputs: input tensor :return: output transformed by the held layer and subsequent dropout """ outputs = self.layer(inputs) return self.dropout(outputs[0]), outputs[1] def transition_mask(vocab, span_type, s_idx, e_idx, pad_idx=None): """Create a mask to enforce span sequence transition constraints. 
Returns a Tensor with valid transitions as a 0 and invalid as a 1 for easy use with `masked_fill` """ np_mask = transition_mask_np(vocab, span_type, s_idx, e_idx, pad_idx=pad_idx) return torch.from_numpy(np_mask) == 0 @torch.jit.script def inplace_assign(data: torch.Tensor, index: torch.Tensor, new_data: torch.Tensor) -> torch.Tensor: new_data = new_data.unsqueeze(0) index = index.expand(1, new_data.size(1)) data.scatter_(0, index, new_data) return data @torch.jit.script def i2t(i: int) -> torch.Tensor: return torch.tensor(i).unsqueeze(0) @torch.jit.script def script_viterbi( unary: torch.Tensor, trans: torch.Tensor, start_idx: int, end_idx: int ) -> Tuple[torch.Tensor, torch.Tensor]: seq_len: int = unary.size(0) num_tags: int = unary.size(1) fill_value: float = -1e4 # dtype=unary.dtype fails, with prim_dtype error on torch 1.7.1 alphas = torch.full((num_tags,), fill_value, dtype=torch.float, device=unary.device) broadcast_idx = torch.full((num_tags,), start_idx, dtype=torch.long) alphas = alphas.scatter(0, broadcast_idx, torch.zeros((num_tags,))) alphas = alphas.unsqueeze(0) backpointers: torch.Tensor = torch.zeros(num_tags, dtype=torch.long).unsqueeze(0) for i in range(seq_len): unary_t = unary[i, :] next_tag_var = alphas + trans viterbi, best_tag_ids = torch.max(next_tag_var, 1) backpointers = torch.cat([backpointers, best_tag_ids.unsqueeze(0)], 0) alphas = (viterbi + unary_t).unsqueeze(0) terminal_vars = alphas.squeeze(0) + trans[end_idx, :] path_score, best_tag_id = torch.max(terminal_vars, 0) best_path = best_tag_id.unsqueeze(0) for i in range(unary.size(0)): t = seq_len - i - 1 best_tag_id = backpointers[t + 1, best_tag_id] best_path = torch.cat([best_path, best_tag_id.unsqueeze(0)], -1) new_path_vec = best_path.flip(0) return new_path_vec[1:], path_score class ViterbiBatchSize1(nn.Module): def __init__(self, start_idx: int, end_idx: int): super().__init__() self.start_idx = start_idx self.end_idx = end_idx def forward(self, unary: torch.Tensor, trans: 
torch.Tensor, _: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: unary = unary.squeeze(1) trans = trans.squeeze(0) path, score = script_viterbi(unary, trans, self.start_idx, self.end_idx) return path.unsqueeze(1), score class Viterbi(nn.Module): def __init__(self, start_idx: int, end_idx: int): super().__init__() self.start_idx = start_idx self.end_idx = end_idx # r, start_idx: int, end_idx: int, norm = lambda x, y: x def forward( self, unary: torch.Tensor, trans: torch.Tensor, lengths: torch.Tensor ) -> Tuple[torch.Tensor, torch.Tensor]: """Do Viterbi decode on a batch. :param unary: torch.FloatTensor: [T, B, N] :param trans: torch.FloatTensor: [1, N, N] :param norm: Callable: This function should take the initial and a dim to normalize along. :return: torch.LongTensor: [T, B] the padded paths :return: torch.FloatTensor: [B] the path scores """ seq_len, batch_size, tag_size = unary.size() min_length = torch.min(lengths) backpointers = [] # Alphas: [B, 1, N] alphas = torch.full((batch_size, 1, tag_size), -1e4, device=unary.device) alphas[:, 0, self.start_idx] = 0 # alphas = self.norm(alphas) for i, unary_t in enumerate(unary): next_tag_var = alphas + trans viterbi, best_tag_ids = torch.max(next_tag_var, 2) backpointers.append(best_tag_ids) new_alphas = viterbi + unary_t new_alphas.unsqueeze_(1) # This part generates a warning if i >= min_length: mask = (i < lengths).view(-1, 1, 1) alphas = alphas.masked_fill(mask, 0) + new_alphas.masked_fill(mask == MASK_FALSE, 0) else: alphas = new_alphas # Add end tag terminal_var = alphas.squeeze(1) + trans[:, self.end_idx, :] path_score, best_tag_id = torch.max(terminal_var, 1) # Flip lengths rev_len = seq_len - lengths - 1 best_path = [best_tag_id] for i in range(len(backpointers)): t = len(backpointers) - i - 1 backpointer_t = backpointers[t] # Get new best tag candidate new_best_tag_id = backpointer_t.gather(1, best_tag_id.unsqueeze(1)).squeeze(1) # We are going backwards now, if flipped length was passed # these you 
aren't in your real results yet mask = i > rev_len best_tag_id = best_tag_id.masked_fill(mask, 0) + new_best_tag_id.masked_fill(mask == MASK_FALSE, 0) best_path.append(best_tag_id) _ = best_path.pop() best_path.reverse() best_path = torch.stack(best_path) # Mask out the extra tags (This might be pointless given thathatt anything that # will use this as a dense tensor downstream will mask it itself?) seq_mask = sequence_mask(lengths, seq_len).to(best_path.device).transpose(0, 1) best_path = best_path.masked_fill(seq_mask == MASK_FALSE, 0) return best_path, path_score @torch.jit.script def script_viterbi_log_softmax_norm( unary: torch.Tensor, trans: torch.Tensor, start_idx: int, end_idx: int ) -> Tuple[torch.Tensor, torch.Tensor]: seq_len: int = unary.size(0) num_tags: int = unary.size(1) fill_value: float = -1e4 # dtype=unary.dtype fails, with prim_dtype error on torch 1.7.1 alphas = torch.full((num_tags,), fill_value, dtype=torch.float, device=unary.device) broadcast_idx = torch.full((num_tags,), start_idx, dtype=torch.long) alphas = alphas.scatter(0, broadcast_idx, torch.zeros((num_tags,))) alphas = alphas.unsqueeze(0) alphas = torch.log(F.softmax(alphas, dim=-1)) backpointers: torch.Tensor = torch.zeros(num_tags, dtype=torch.long).unsqueeze(0) for i in range(seq_len): unary_t = unary[i, :] next_tag_var = alphas + trans viterbi, best_tag_ids = torch.max(next_tag_var, 1) backpointers = torch.cat([backpointers, best_tag_ids.unsqueeze(0)], 0) alphas = (viterbi + unary_t).unsqueeze(0) terminal_vars = alphas.squeeze(0) + trans[end_idx, :] path_score, best_tag_id = torch.max(terminal_vars, 0) best_path = best_tag_id.unsqueeze(0) for i in range(unary.size(0)): t = seq_len - i - 1 best_tag_id = backpointers[t + 1, best_tag_id] best_path = torch.cat([best_path, best_tag_id.unsqueeze(0)], -1) new_path_vec = best_path.flip(0) return new_path_vec[1:], path_score class ViterbiLogSoftmaxNormBatchSize1(nn.Module): def __init__(self, start_idx: int, end_idx: int): 
super().__init__() self.start_idx = start_idx self.end_idx = end_idx def forward(self, unary: torch.Tensor, trans: torch.Tensor, _: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: unary = unary.squeeze(1) trans = trans.squeeze(0) path, score = script_viterbi_log_softmax_norm(unary, trans, self.start_idx, self.end_idx) return path.unsqueeze(1), score class ViterbiLogSoftmaxNorm(Viterbi): def forward( self, unary: torch.Tensor, trans: torch.Tensor, lengths: torch.Tensor ) -> Tuple[torch.Tensor, torch.Tensor]: """Do Viterbi decode on a batch. :param unary: torch.FloatTensor: [T, B, N] :param trans: torch.FloatTensor: [1, N, N] :param norm: Callable: This function should take the initial and a dim to normalize along. :return: torch.LongTensor: [T, B] the padded paths :return: torch.FloatTensor: [B] the path scores """ seq_len, batch_size, tag_size = unary.size() min_length = torch.min(lengths) backpointers = [] # Alphas: [B, 1, N] alphas = torch.full((batch_size, 1, tag_size), -1e4, device=unary.device) alphas[:, 0, self.start_idx] = 0 alphas = F.log_softmax(alphas, dim=-1) for i, unary_t in enumerate(unary): next_tag_var = alphas + trans viterbi, best_tag_ids = torch.max(next_tag_var, 2) backpointers.append(best_tag_ids) new_alphas = viterbi + unary_t new_alphas.unsqueeze_(1) if i >= min_length: mask = (i < lengths).view(-1, 1, 1) alphas = alphas.masked_fill(mask, 0) + new_alphas.masked_fill(mask == MASK_FALSE, 0) else: alphas = new_alphas # Add end tag terminal_var = alphas.squeeze(1) + trans[:, self.end_idx, :] path_score, best_tag_id = torch.max(terminal_var, 1) # Flip lengths rev_len = seq_len - lengths - 1 best_path = [best_tag_id] for i in range(len(backpointers)): t = len(backpointers) - i - 1 backpointer_t = backpointers[t] # Get new best tag candidate new_best_tag_id = backpointer_t.gather(1, best_tag_id.unsqueeze(1)).squeeze(1) # We are going backwards now, if flipped length was passed # these you aren't in your real results yet mask = i > rev_len 
best_tag_id = best_tag_id.masked_fill(mask, 0) + new_best_tag_id.masked_fill(mask == MASK_FALSE, 0) best_path.append(best_tag_id) _ = best_path.pop() best_path.reverse() best_path = torch.stack(best_path) # Mask out the extra tags (This might be pointless given that anything that # will use this as a dense tensor downstream will mask it itself?) seq_mask = sequence_mask(lengths, seq_len).to(best_path.device).transpose(0, 1) best_path = best_path.masked_fill(seq_mask == MASK_FALSE, 0) return best_path, path_score def ident(x): return x class TaggerGreedyDecoder(nn.Module): def __init__( self, num_tags: int, constraint_mask: Optional[torch.Tensor] = None, batch_first: bool = True, reduction: str = "batch", ): """A Greedy decoder and loss module for taggers. :param num_tags: `int` The number of output classes :param constraint_mask: `Tensor[1, N, N]` A mask with valid transitions as 1 and invalid as 0 :param batch_first: `bool` Should the batch dimensions be first? :param reduction: `str` Should the loss be calculated at the token level or batch level """ super().__init__() self.num_tags = num_tags if constraint_mask is not None: constraint_mask = F.log_softmax( torch.zeros(constraint_mask.shape).masked_fill(constraint_mask, -1e4), dim=1 ) self.register_buffer("constraint_mask", constraint_mask) else: self.constraint_mask = None # FIXME: we cant do it like this if using TorchScript self.to_batch_first = ident if batch_first else tbh2bth self.to_time_first = bth2tbh if batch_first else ident self.batch_first = batch_first self.loss = SequenceLoss(LossFn=nn.CrossEntropyLoss, avg=reduction) self.viterbi = ViterbiLogSoftmaxNorm(Offsets.GO, Offsets.EOS) @property def transitions(self): return self.constraint_mask def neg_log_loss(self, inputs, tags, lengths): unaries = self.to_batch_first(inputs) tags = self.to_batch_first(tags) return self.loss(unaries, tags) def forward(self, inputs) -> torch.Tensor: unaries, lengths = tensor_and_lengths(inputs) # If there is a 
constraint mask do a masked viterbi if self.constraint_mask is not None: probv = self.to_time_first(unaries) probv = F.log_softmax(probv, dim=-1) preds, scores = self.viterbi(probv, self.constraint_mask, lengths) if self.batch_first: return tbh2bth(preds) # , scores else: return preds else: # Decoding doesn't care about batch/time first _, preds = torch.max(unaries, -1) mask = sequence_mask(lengths, unaries.shape[1]).to(preds.device) # The mask gets generated as batch first mask = mask if self.batch_first else mask.transpose(0, 1) preds = preds.masked_fill(mask == MASK_FALSE, 0) return preds # , None def extra_repr(self) -> str: str_ = f"n_tags={self.num_tags}, batch_first={self.batch_first}" if self.constraint_mask is not None: str_ += ", constrained=True" return str_ class CRF(nn.Module): def __init__( self, num_tags: int, constraint_mask: Optional[torch.Tensor] = None, batch_first: bool = True, idxs: Tuple[int, int] = (Offsets.GO, Offsets.EOS), ): """Initialize the object. :param num_tags: int, The number of tags in your output (emission size) :param constraint: torch.ByteTensor, Constraints on the transitions [1, N, N] :param idxs: Tuple(int. int), The index of the start and stop symbol in emissions. :param batch_first: bool, if the input [B, T, ...] or [T, B, ...] Note: if idxs is none then the CRF adds these symbols to the emission vectors and n_tags is assumed to be the number of output tags. if idxs is not none then the first element is assumed to be the start index and the second idx is assumed to be the end index. In this case n_tags is assumed to include the start and end symbols. 
""" super().__init__() self.start_idx, self.end_idx = idxs self.num_tags = num_tags if constraint_mask is not None: self.register_buffer("constraint_mask", constraint_mask) else: self.constraint_mask = None self.transitions_p = nn.Parameter(torch.Tensor(1, self.num_tags, self.num_tags).zero_()) self.batch_first = batch_first self.viterbi = Viterbi(self.start_idx, self.end_idx) def extra_repr(self) -> str: str_ = "n_tags=%d, batch_first=%s" % (self.num_tags, self.batch_first) if self.constraint_mask is not None: str_ += ", constrained=True" return str_ @property def transitions(self): if self.constraint_mask is not None: return self.transitions_p.masked_fill(self.constraint_mask, -1e4) return self.transitions_p def neg_log_loss(self, unary, tags, lengths): """Neg Log Loss with a Batched CRF. :param unary: torch.FloatTensor: [T, B, N] or [B, T, N] :param tags: torch.LongTensor: [T, B] or [B, T] :param lengths: torch.LongTensor: [B] :return: torch.FloatTensor: [B] """ # Convert from [B, T, N] -> [T, B, N] if self.batch_first: unary = unary.transpose(0, 1) tags = tags.transpose(0, 1) _, batch_size, _ = unary.size() fwd_score = self._forward_alg(unary, lengths) gold_score = self.score_sentence(unary, tags, lengths) loss = fwd_score - gold_score batch_loss = torch.mean(loss) return batch_loss def score_sentence(self, unary: torch.Tensor, tags: torch.Tensor, lengths: torch.Tensor) -> torch.Tensor: """Score a batch of sentences. 
:param unary: torch.FloatTensor: [T, B, N] :param tags: torch.LongTensor: [T, B] :param lengths: torch.LongTensor: [B] :param min_length: torch.LongTensor: [] :return: torch.FloatTensor: [B] """ batch_size = lengths.shape[0] assert lengths.shape[0] == unary.shape[1] trans = self.transitions.squeeze(0) # [N, N] start = torch.full((1, batch_size), self.start_idx, dtype=tags.dtype, device=tags.device) # [1, B] tags = torch.cat([start, tags], 0) # [T + 1, B] # Unfold gives me all slices of size 2 (this tag next tag) from dimension T tag_pairs = tags.unfold(0, 2, 1) # Move the pair dim to the front and split it into two indices = tag_pairs.permute(2, 0, 1).chunk(2) trans_score = trans[[indices[1], indices[0]]].squeeze(0) # Pull out the values of the tags from the unary scores. unary_score = unary.gather(2, tags[1:].unsqueeze(-1)).squeeze(-1) mask = sequence_mask(lengths).transpose(0, 1).to(tags.device) scores = unary_score + trans_score scores = scores.masked_fill(mask == MASK_FALSE, 0) scores = scores.sum(0) eos_scores = trans[self.end_idx, tags.gather(0, lengths.unsqueeze(0)).squeeze(0)] scores = scores + eos_scores return scores def _forward_alg(self, unary: torch.Tensor, lengths: torch.Tensor) -> torch.Tensor: """For CRF forward on a batch. 
:param unary: torch.FloatTensor: [T, B, N] :param lengths: torch.LongTensor: [B] :return: torch.FloatTensor: [B] """ # alphas: [B, 1, N] min_length = torch.min(lengths) batch_size = lengths.shape[0] lengths.shape[0] == unary.shape[1] alphas = torch.full((batch_size, 1, self.num_tags), -1e4, device=unary.device) alphas[:, 0, self.start_idx] = 0.0 # alphas.requires_grad = True trans = self.transitions # [1, N, N] for i, unary_t in enumerate(unary): # unary_t: [B, N] unary_t = unary_t.unsqueeze(2) # [B, N, 1] # Broadcast alphas along the rows of trans # Broadcast trans along the batch of alphas # [B, 1, N] + [1, N, N] -> [B, N, N] # Broadcast unary_t along the cols of result # [B, N, N] + [B, N, 1] -> [B, N, N] scores = alphas + trans + unary_t new_alphas = vec_log_sum_exp(scores, 2).transpose(1, 2) # If we haven't reached your length zero out old alpha and take new one. # If we are past your length, zero out new_alpha and keep old one. if i >= min_length: mask = (i < lengths).view(-1, 1, 1) alphas = alphas.masked_fill(mask, 0) + new_alphas.masked_fill(mask == MASK_FALSE, 0) else: alphas = new_alphas terminal_vars = alphas + trans[:, self.end_idx] alphas = vec_log_sum_exp(terminal_vars, 2) return alphas.view(batch_size) def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: unary, lengths = inputs if self.training: if self.batch_first: unary = unary.transpose(0, 1) forward = self._forward_alg(unary, lengths) # if self.batch_first: # forward = forward.transpose(0, 1) return forward with torch.no_grad(): return self.decode(unary, lengths)[0] @jit.export def decode(self, unary: torch.Tensor, lengths: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: """Do Viterbi decode on a batch. 
:param unary: torch.FloatTensor: [T, B, N] or [B, T, N] :param lengths: torch.LongTensor: [B] :return: torch.LongTensor: [B] the paths :return: torch.FloatTensor: [B] the path score """ if self.batch_first: unary = unary.transpose(0, 1) trans = self.transitions # [1, N, N] path, score = self.viterbi(unary, trans, lengths) if self.batch_first: path = path.transpose(0, 1) return path, score class SequenceModel(nn.Module): def __init__(self, nc: int, embeddings: nn.Module, transducer: nn.Module, decoder: Optional[nn.Module] = None): super().__init__() self.embed_model = embeddings self.transducer_model = transducer # TODO: make this a separate model! if transducer.output_dim != nc: self.proj_layer = Dense(transducer.output_dim, nc) else: self.proj_layer = nn.Identity() self.decoder_model = decoder def transduce(self, inputs: Dict[str, torch.Tensor]) -> torch.Tensor: lengths = inputs["lengths"] embedded = self.embed_model(inputs) embedded = (embedded, lengths) # transduced = self.transducer_model(embedded) transduced = self.proj_layer(self.transducer_model(embedded)) return transduced def decode(self, transduced: torch.Tensor, lengths: torch.Tensor) -> torch.Tensor: return self.decoder_model((transduced, lengths)) def forward(self, inputs: Dict[str, torch.Tensor]) -> torch.Tensor: pass class TagSequenceModel(SequenceModel): def __init__(self, nc: int, embeddings: nn.Module, transducer: nn.Module, decoder: Optional[nn.Module] = None): decoder_model = CRF(nc, batch_first=True) if decoder is None else decoder super().__init__(nc, embeddings, transducer, decoder_model) def neg_log_loss(self, unary: torch.Tensor, tags: torch.Tensor, lengths: torch.Tensor) -> torch.Tensor: return self.decoder_model.neg_log_loss(unary, tags, lengths) def forward(self, inputs: Dict[str, torch.Tensor]) -> torch.Tensor: transduced = self.transduce(inputs) path = self.decode(transduced, inputs["lengths"]) return path class LangSequenceModel(nn.Module): def __init__( self, nc: int, embeddings: 
nn.Module, transducer: nn.Module, decoder: Optional[nn.Module] = None, name: Optional[str] = None, ): super().__init__() self.embed_model = embeddings self.transducer_model = transducer if hasattr(transducer, "requires_state") and transducer.requires_state: self._call = self._call_with_state self.requires_state = True else: self._call = self._call_without_state self.requires_state = False self.output_layer = nn.Linear(self.transducer_model.output_dim, nc) self.decoder_model = decoder def forward(self, inputs: Dict[str, torch.Tensor]) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: return self._call(inputs) def _call_with_state(self, inputs: Dict[str, torch.Tensor]) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: h = inputs["h"] embedded = self.embed_model(inputs) transduced, hidden = self.transducer_model((embedded, h)) transduced = self.output_layer(transduced) return transduced, hidden def _call_without_state(self, inputs: Dict[str, torch.Tensor]) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: embedded = self.embed_model(inputs) transduced = self.transducer_model((embedded, None)) transduced = self.output_layer(transduced) return transduced, None def pytorch_embedding(weights: torch.Tensor, finetune: bool = True) -> nn.Embedding: """Creation function for making an nn.Embedding with the given weights :param weights: The weights to use :param finetune: Should we fine-tune the embeddings or freeze them """ lut = nn.Embedding(weights.shape[0], weights.shape[1], padding_idx=Offsets.PAD) del lut.weight lut.weight = nn.Parameter(torch.FloatTensor(weights), requires_grad=finetune) return lut def subsequent_mask(size: int): """ Creates a lower triangular mask to mask future :param size: Temporal length :return: A tensor of type `uint8` that is 1s along diagonals and below, zero o.w """ attn_shape = (1, 1, size, size) sub_mask = np.tril(np.ones(attn_shape)).astype("uint8") return torch.from_numpy(sub_mask) class SequenceSequenceAttention(nn.Module): def __init__(self, 
hsz: int = None, pdrop: float = 0.1, **kwargs): super().__init__() self.hsz = hsz self.dropout = nn.Dropout(pdrop) self.attn = None def forward(self, qkvm: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor: query, key, value, mask = qkvm a = self._attention(query, key, mask) self.attn = a a = self.dropout(a) return self._update(a, value) def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: pass def _update(self, a: torch.Tensor, value: torch.Tensor) -> torch.Tensor: """Attention weights are applied for each value, but in a series of efficient matrix operations. In the case of self-attention, the key and query (used to create the attention weights) and values are all low order projections of the same input. :param a: The attention weights [B, H, T_q, T_k] :param values: The values [B, H, T_k, D] :returns: A tensor of shape [B, H, T_q, D] """ return torch.matmul(a, value) class SeqScaledDotProductAttention(SequenceSequenceAttention): def __init__(self, pdrop: float = 0.1, **kwargs): super().__init__(pdrop=pdrop, **kwargs) def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: """Scaled dot product attention, as defined in https://arxiv.org/abs/1706.03762 We apply the query to the keys to receive our weights via softmax in a series of efficient matrix operations. In the case of self-attention the key and query are all low order projections of the same input. :param query: a query for alignment. 
Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: A tensor that is (BxHxTxT) """ # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) d_k = query.size(-1) scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k) if mask is not None: scores = scores.masked_fill(mask == MASK_FALSE, -1e9) # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k] return F.softmax(scores, dim=-1) class SeqScaledDotProductAttentionALiBi(SequenceSequenceAttention): def __init__(self, pdrop: float = 0.1, num_heads=None, **kwargs): super().__init__(pdrop=pdrop, **kwargs) self.num_heads = num_heads slopes = torch.tensor(get_alibi_slopes(self.num_heads)) self.register_buffer("slopes", slopes) def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: """Attention with Linear Biases, defined in https://arxiv.org/pdf/2108.12409.pdf :param query: a query for alignment. 
Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: A tensor that is (BxHxTxT) """ # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) d_k = query.size(-1) scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k) T_k = scores.shape[-1] T_q = scores.shape[-2] offsets = - torch.abs(torch.arange(T_q).view(-1, 1) - torch.arange(T_k).view(1, -1)).to(self.slopes.device) # [T_q, T_k] alibi = self.slopes.unsqueeze(-1).unsqueeze(-1) * offsets.unsqueeze(0) # [H, T_q, T_k] alibi = alibi.unsqueeze(0) # [1, H, T_q, T_k] scores += alibi if mask is not None: scores = scores.masked_fill(mask == MASK_FALSE, -1e9) # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k] return F.softmax(scores, dim=-1) class SeqScaledDotProductAttentionT5(SequenceSequenceAttention): def __init__(self, pdrop: float = 0.1, num_heads=None, bidirectional=True, num_buckets=32, max_distance=128, **kwargs): super().__init__(pdrop=pdrop, **kwargs) self.num_heads = num_heads self.bidirectional = bidirectional self.num_buckets = num_buckets self.max_distance = max_distance rel_embedding = torch.nn.init.kaiming_normal_(torch.empty((self.num_heads, self.num_buckets), dtype=torch.float), nonlinearity='linear') self.rel_embedding = nn.Parameter(rel_embedding, requires_grad=True) def _relative_position_bucket(self, relative_position): """Taken from https://github.com/tensorflow/mesh/blob/bbb6ce7917e2a8ef1f3dc6990fcacd4f3b075acd/mesh_tensorflow/transformer/transformer_layers.py#L1014 """ ret = 0 n = -relative_position num_buckets = self.num_buckets if self.bidirectional: num_buckets //= 2 ret += torch.lt(n, 0).to(dtype=torch.long) * num_buckets n = torch.abs(n).to(dtype=torch.long) else: n = torch.maximum(n, 0).to(dtype=torch.long) # now n is in the range [0, inf) max_exact = num_buckets // 2 is_small = torch.lt(n, max_exact) val_if_large = max_exact + ( 
            torch.log(n.to(dtype=torch.float32) / max_exact) / math.log(self.max_distance / max_exact) * (num_buckets - max_exact)).to(dtype=torch.long)
        # cap at the last bucket so far-away positions share one bucket
        val_if_large = torch.minimum(val_if_large, torch.tensor(num_buckets - 1))
        ret += torch.where(is_small, n, val_if_large)
        return ret

    def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor:
        """Relative Attention described in https://arxiv.org/abs/1910.10683

        :param query: a query for alignment.
        :param key: a set of keys from encoder or self
        :param mask: masking (for destination) to prevent seeing what we shouldnt
        :return: A tensor that is (BxHxTxT)
        """
        # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k)
        d_k = query.size(-1)
        scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k)
        T_k = scores.shape[-1]
        T_q = scores.shape[-2]
        # signed distance key_pos - query_pos, bucketed then looked up per head
        memory_position = torch.arange(T_k).view(1, -1)
        query_position = torch.arange(T_q).view(-1, 1)
        relative_position = memory_position - query_position
        rp_bucket = self._relative_position_bucket(relative_position)
        relative_attention_bias = self.rel_embedding[:, rp_bucket]
        scores += relative_attention_bias
        if mask is not None:
            scores = scores.masked_fill(mask == MASK_FALSE, -1e9)  # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k]
        return F.softmax(scores, dim=-1)


class SeqDotProductAttention(SequenceSequenceAttention):
    """Unscaled dot-product attention (no 1/sqrt(d_k) factor)."""

    def __init__(self, pdrop: float = 0.1, **kwargs):
        super().__init__(pdrop=pdrop, **kwargs)

    def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor:
        scores = torch.matmul(query, key.transpose(-2, -1))
        if mask is not None:
            scores = scores.masked_fill(mask == MASK_FALSE, -1e9)
        return F.softmax(scores, dim=-1)


class SeqDotProductAttentionALiBi(SequenceSequenceAttention):
    """Unscaled dot-product attention with ALiBi positional biases."""

    def __init__(self, pdrop: float = 0.1, num_heads=None, **kwargs):
        super().__init__(pdrop=pdrop, **kwargs)
        self.num_heads = num_heads
        # Fixed (non-learned) per-head slopes
        slopes = torch.tensor(get_alibi_slopes(self.num_heads))
self.register_buffer("slopes", slopes) def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: scores = torch.matmul(query, key.transpose(-2, -1)) T_k = scores.shape[-1] T_q = scores.shape[-2] offsets = - torch.abs(torch.arange(T_q).view(1, -1) - torch.arange(T_k).view(-1, 1)).to(self.slopes.device) # [T_q, T_k] alibi = self.slopes.unsqueeze(-1).unsqueeze(-1) * offsets.unsqueeze(0) # [H, T_q, T_k] alibi = alibi.unsqueeze(0) # [1, H, T_q, T_k] scores += alibi if mask is not None: scores = scores.masked_fill(mask == MASK_FALSE, -1e9) return F.softmax(scores, dim=-1) class SeqDotProductAttentionT5(SequenceSequenceAttention): def __init__(self, pdrop: float = 0.1, num_heads=None, bidirectional=True, num_buckets=32, max_distance=128, **kwargs): super().__init__(pdrop=pdrop, **kwargs) self.num_heads = num_heads self.bidirectional = bidirectional self.num_buckets = num_buckets self.max_distance = max_distance rel_embedding = torch.nn.init.kaiming_normal_(torch.empty((self.num_heads, self.num_buckets), dtype=torch.float), nonlinearity='linear') self.rel_embedding = nn.Parameter(rel_embedding, requires_grad=True) def _relative_position_bucket(self, relative_position): """Taken from https://github.com/tensorflow/mesh/blob/bbb6ce7917e2a8ef1f3dc6990fcacd4f3b075acd/mesh_tensorflow/transformer/transformer_layers.py#L1014 """ ret = 0 n = -relative_position num_buckets = self.num_buckets if self.bidirectional: num_buckets //= 2 ret += torch.lt(n, 0).to(dtype=torch.long) * num_buckets n = torch.abs(n).to(dtype=torch.long) else: n = torch.maximum(n, 0).to(dtype=torch.long) # now n is in the range [0, inf) max_exact = num_buckets // 2 is_small = torch.lt(n, max_exact) val_if_large = max_exact + ( torch.log(n.to(dtype=torch.float32) / max_exact) / math.log(self.max_distance / max_exact) * (num_buckets - max_exact)).to(dtype=torch.long) val_if_large = torch.minimum(val_if_large, torch.tensor(num_buckets - 1)) ret += 
 torch.where(is_small, n, val_if_large)
        return ret

    def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor:
        """Relative Attention described in https://arxiv.org/abs/1910.10683

        :param query: a query for alignment.
        :param key: a set of keys from encoder or self
        :param mask: masking (for destination) to prevent seeing what we shouldnt
        :return: A tensor that is (BxHxTxT)
        """
        # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k)
        scores = torch.matmul(query, key.transpose(-2, -1))
        T_k = scores.shape[-1]
        T_q = scores.shape[-2]
        # signed distance key_pos - query_pos, bucketed then looked up per head
        memory_position = torch.arange(T_k).view(1, -1)
        query_position = torch.arange(T_q).view(-1, 1)
        relative_position = memory_position - query_position
        rp_bucket = self._relative_position_bucket(relative_position)
        relative_attention_bias = self.rel_embedding[:, rp_bucket]
        scores += relative_attention_bias
        if mask is not None:
            scores = scores.masked_fill(mask == MASK_FALSE, -1e9)  # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k]
        return F.softmax(scores, dim=-1)


class SequenceSequenceRelativeAttention(nn.Module):
    """This form of attention is specified in Shaw et al 2018: https://www.aclweb.org/anthology/N18-2074.pdf
    """

    def __init__(self, hsz: int = None, pdrop: float = 0.1, **kwargs):
        super().__init__()
        self.hsz = hsz
        self.dropout = nn.Dropout(pdrop)
        self.attn = None  # last computed attention weights, kept for inspection

    def forward(
        self, q_k_v_ek_ev_m: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]
    ) -> torch.Tensor:
        """Take in a tuple of tensors corresponding to the query, key, value, edges_key, edges_value and mask variables

        :param q_k_v_ek_ev_m: A tuple consisting of query, key, value, `edges_key`, `edges_value` and `mask` respectively
        :return: An updated value Tensor
        """
        query, key, value, edges_key, edges_value, mask = q_k_v_ek_ev_m
        a = self._attention(query, key, edges_key, mask)
        self.attn = a
        a = self.dropout(a)
        return self._update(a, value, edges_value)

    def _attention(
        self, query: torch.Tensor, key: torch.Tensor, edges_key: torch.Tensor, mask: Optional[torch.Tensor] = None
    ) -> torch.Tensor:
        # Abstract: subclasses produce the normalized attention weights here
        pass

    def _update(self, a: torch.Tensor, value: torch.Tensor, edges_value: torch.Tensor) -> torch.Tensor:
        """Attention weights are applied for each value, but in a series of efficient matrix operations.

        In the case of self-attention, the key and query (used to create the attention weights)
        and values are all low order projections of the same input.

        :param a: The attention weights [B, H, T_q, T_k]
        :param value: The values [B, H, T_k, D]
        :param edge_value: The edge values [T_q, T_k, D]
        :returns: A tensor of shape [B, H, T, D]
        """
        B, H, T_k, D = value.shape
        updated_values = torch.matmul(a, value)  # [B, H, T_q, D]
        if edges_value is not None:
            # fold batch and heads together so the edge matmul is a single bmm
            a = a.view(B * H, -1, T_k).transpose(0, 1)  # (T_q, BxH, T_k)
            t = torch.matmul(a, edges_value)  # (T_q, BxH, D)
            update_edge_values = t.transpose(0, 1).view(B, H, -1, D)
            return updated_values + update_edge_values
        else:
            return updated_values


class SeqScaledDotProductRelativeAttention(SequenceSequenceRelativeAttention):
    def __init__(self, pdrop: float = 0.1, **kwargs):
        super().__init__(pdrop=pdrop, **kwargs)

    def _attention(
        self, query: torch.Tensor, key: torch.Tensor, edges_key: torch.Tensor, mask: Optional[torch.Tensor] = None
    ) -> torch.Tensor:
        """Scaled dot product attention, as defined in https://arxiv.org/abs/1706.03762

        We apply the query to the keys to receive our weights via softmax in a series of efficient
        matrix operations.  In the case of self-attntion the key and query are all low order
        projections of the same input.

        :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D
        :param key: a set of keys from encoder or self
        :param mask: masking (for destination) to prevent seeing what we shouldnt
        :param edges_key: a matrix of relative embeddings between each word in a sequence [T_q x T_k x D]
        :return: A tensor that is (B x H x T_q x T_k)
        """
        B, H, T_q, d_k = query.shape
        # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k)
        scores_qk = torch.matmul(query, key.transpose(-2, -1))
        tbhd = query.reshape(B * H, T_q, d_k).transpose(0, 1)  # [T_q, B*H, d_k]
        scores_qek = torch.matmul(tbhd, edges_key.transpose(-2, -1))  # [T_q, B*H, T_k]
        scores_qek = scores_qek.transpose(0, 1).view(B, H, T_q, -1)  # [B, H, T_q, T_k]
        scores = (scores_qk + scores_qek) / math.sqrt(d_k)
        # only for cross-attention T_q != T_k. for such case, mask should be src_mask, which is a sequence_mask with
        # dimension [B, 1, 1, T_k], and will be broadcast to dim of scores:
        if mask is not None:
            scores = scores.masked_fill(mask == MASK_FALSE, -1e9)
        return F.softmax(scores, dim=-1)


class SeqDotProductRelativeAttention(SequenceSequenceRelativeAttention):
    """Unscaled variant of relative attention (Shaw et al. 2018)."""

    def __init__(self, pdrop: float = 0.1, **kwargs):
        super().__init__(pdrop=pdrop, **kwargs)

    def _attention(
        self, query: torch.Tensor, key: torch.Tensor, edges_key: torch.Tensor, mask: Optional[torch.Tensor] = None
    ) -> torch.Tensor:
        B, H, T_q, d_k = query.shape
        scores_qk = torch.matmul(query, key.transpose(-2, -1))
        tbhd = query.reshape(B * H, T_q, d_k).transpose(0, 1)
        scores_qek = torch.matmul(tbhd, edges_key.transpose(-2, -1))
        scores_qek = scores_qek.transpose(0, 1).view(B, H, T_q, -1)
        scores = scores_qk + scores_qek
        if mask is not None:
            scores = scores.masked_fill(mask == MASK_FALSE, -1e9)
        return F.softmax(scores, dim=-1)


def unfold_tensor(tensor, dim, window_sz):
    """Unfold a tensor by applying a sliding window on a certain dimension with step 1 and padding of 0's.
 The window dimension is added as the last dimension

    :param tensor: the tensor to be unfolded, with shape [d_1, d_2, ..., T, ..., d_n]
    :param dim: the dimension along which unfolding is applied
    :param window_sz: sliding window size, need to be an odd number
    :return: the unfolded tensor with shape [d_1, d_2, ..., T, ..., d_n, window_sz]
    """
    half_window = (window_sz - 1) // 2
    if dim < 0:
        dim = len(tensor.shape) + dim
    # torch.nn.functional.pad apply backwardly from the last dimension
    padding = [0, 0] * (len(tensor.shape) - dim - 1) + [half_window, half_window]
    return F.pad(tensor, padding).unfold(dim, window_sz, 1)


class SeqScaledWindowedRelativeAttention(SequenceSequenceRelativeAttention):
    """This class implements windowed relative attention, i.e. preventing attention beyond rpr_k. For efficiency,
    _attention and _update are implemented in a different way."""

    def __init__(self, pdrop: float = 0.1, **kwargs):
        super().__init__(pdrop=pdrop, **kwargs)

    def _unfold_mask(self, mask, batchsz, rpr_k):
        """Transform mask into the unfolded format."""
        window_sz = 2 * rpr_k + 1
        T = mask.shape[3]
        if mask.shape[2] > 1:  # mask is from a subsequent mask, with [1, 1, T, T] or [B, 1, T, T]
            logger.warning("Using subsequent mask with long sequence may cause OOM error.")
            mask = mask.expand(batchsz, 1, T, T)  # expand sequence/subsequent mask into a uniform dim
            mask = F.pad(mask, [rpr_k, rpr_k])  # pad both sides with rpr_k, [B, 1, T, T + 2*rpr_k]
            seq = torch.arange(T + 2 * rpr_k)
            indices = seq.unfold(0, window_sz, 1)  # indices of a sliding window, [T, W]
            indices = indices.unsqueeze(0).unsqueeze(0).expand(batchsz, 1, T, window_sz).to(mask.device)
            return torch.gather(mask, -1, indices)  # [B, 1, T, W]):
        else:  # mask is a sequence mask [B, 1, 1, T]
            unfolded = unfold_tensor(mask, dim=-1, window_sz=window_sz)  # [B, 1, 1, T, W]
            return unfolded.squeeze(1)  # [B, 1, T, W]

    def _attention(
        self, query: torch.Tensor, key: torch.Tensor, rpr_key: torch.Tensor, mask: Optional[torch.Tensor] = None
    ) -> torch.Tensor:
        """Implementation of attention considering RA masking: using torch.Tensor.unfold to create an extra dimension
        representing the sliding window. Then when applying matmul, Q, K, V share the same T dimension.

        :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D
        :param key: a set of keys from encoder or self
        :param mask: masking (for destination) to prevent seeing what we shouldnt
        :param rpr_key: tensor of the rpr_key embeddings [W, d_k]
        :return: A tensor that is [B, H, T, 1, W] to be matmul with values
        """
        B, H, T, d_k = query.shape
        window_sz = rpr_key.shape[0]
        rpr_k = (window_sz - 1) // 2
        query = query.unsqueeze(-2)  # [B, H, T, 1, d_k]
        key = unfold_tensor(key, dim=2, window_sz=window_sz)  # [B, H, T, d_k, W]
        rpr_key = rpr_key.transpose(0, 1).unsqueeze(0).unsqueeze(0).unsqueeze(0)  # [1, 1, 1, d_k, W]

        scores_qk = torch.matmul(query, key)  # [B, H, T, 1, W]
        scores_qrk = torch.matmul(query, rpr_key)  # [B, H, T, 1, W]
        scores = (scores_qk + scores_qrk) / math.sqrt(d_k)
        if mask is not None:
            mask = self._unfold_mask(mask, B, rpr_k).unsqueeze(-2)  # [B, 1, T, 1, W]
            # NOTE(review): compares against `False` literal where sibling classes use MASK_FALSE —
            # equivalent for boolean masks, but consider unifying for consistency
            scores = scores.masked_fill(mask == False, -1e9)
        return F.softmax(scores, dim=-1)

    def _update(self, a: torch.Tensor, value: torch.Tensor, rpr_value: torch.Tensor) -> torch.Tensor:
        # a has dim [B, H, T, 1, W]
        window_sz = a.shape[-1]
        value = unfold_tensor(value, dim=2, window_sz=window_sz).transpose(-1, -2)  # [B, H, T, W, d_value]
        updated_values = torch.matmul(a, value)  # [B, H, T, 1, d_value]
        if rpr_value is not None:
            rpr_value = rpr_value.unsqueeze(0).unsqueeze(0).unsqueeze(0)  # [1, 1, 1, W, d_value]
            update_rpr_values = torch.matmul(a, rpr_value)  # [B, H, T, 1, d_value]
            return (updated_values + update_rpr_values).squeeze(3)  # [B, H, T, d_value]
        else:
            return updated_values.squeeze(3)


class SeqBahdanauAttention(SequenceSequenceAttention):
    """Additive (Bahdanau-style) attention scored by a small MLP over query+key."""

    def __init__(self, hsz: int, pdrop: float = 0.1, **kwargs):
        super().__init__(hsz, pdrop=pdrop, **kwargs)
        self.V =
 pytorch_linear(self.hsz, 1, bias=False)

    def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor:
        # [B, H, T, 1, D] + [B, H, 1, T, D] = [B, H, T, T, D]
        additive = query.unsqueeze(-2) + key.unsqueeze(-3)
        non_linear = torch.tanh(additive)
        # [B, H, T, T, D] @ [D, 1] = [B, H, T, T, 1]
        scores = self.V(non_linear)
        # [B, H, T, T]
        scores = scores.squeeze(-1)
        return F.softmax(scores, dim=-1)


class MultiHeadedAttention(nn.Module):
    """
    Multi-headed attention from https://arxiv.org/abs/1706.03762 via http://nlp.seas.harvard.edu/2018/04/03/attention.html

    Multi-headed attention provides multiple looks of low-order projections K, Q and V using an attention function
    (specifically `scaled_dot_product_attention` in the paper.  This allows multiple relationships to be illuminated
    via attention on different positional and representational information from each head.

    The number of heads `h` times the low-order projection dim `d_k` is equal to `d_model` (which is asserted upfront).
    This means that each weight matrix can be simply represented as a linear transformation from `d_model` to
    `d_model`, and partitioned into heads after the fact.

    Finally, an output projection is applied which brings the output space back to `d_model`, in preparation for the
    sub-sequent `FFN` sub-layer.

    There are 3 uses of multi-head attention in the Transformer.
    For encoder-decoder layers, the queries come from the previous decoder layer, and the memory keys come from
    the encoder.  For encoder layers, the K, Q and V all come from the output of the previous layer of the encoder.
    And for self-attention in the decoder, K, Q and V all come from the decoder, but here it is masked to prevent
    using future values
    """

    def __init__(
        self,
        num_heads: int,
        d_model: int,
        dropout: float = 0.1,
        scale: bool = False,
        d_k: Optional[int] = None,
        ra_type: Optional[str] = None,
    ):
        """Constructor for multi-headed attention

        :param h: The number of heads
        :param d_model: The model hidden size
        :param dropout (``float``): The amount of dropout to use
        :param scale: Should we scale the dot product attention
        :param d_k: The low-order project per head.  This is normally `d_model // num_heads` unless set explicitly
        :param ra_type: If there is an attention bias term, that will be encapsulated in the attention computation
        """
        super().__init__()
        if d_k is None:
            self.d_k = d_model // num_heads
            if d_model % num_heads != 0:
                raise Exception(f"d_model ({d_model}) must be evenly divisible by num_heads ({num_heads})")
        else:
            self.d_k = d_k
        self.h = num_heads
        # for multi-headed attention, w_V projects to h heads, each head has dim d_k; for single headed attention, w_V
        # project to 1 head with dim d_model
        if self.h > 1:
            self.d_value = self.d_k
        else:
            self.d_value = d_model
        self.w_Q = Dense(d_model, self.d_k * self.h)
        self.w_K = Dense(d_model, self.d_k * self.h)
        self.w_V = Dense(d_model, self.d_value * self.h)
        if self.h > 1:  # w_O is not needed for single headed attention
            self.w_O = Dense(self.d_k * self.h, d_model)
        # choose scaled vs unscaled attention, with optional ALiBi or T5 relative bias
        if scale:
            if ra_type == 'alibi':
                self.attn_fn = SeqScaledDotProductAttentionALiBi(dropout, num_heads=num_heads)
            elif ra_type == 't5':
                # TODO: pass through options
                self.attn_fn = SeqScaledDotProductAttentionT5(dropout, num_heads=num_heads)
            else:
                self.attn_fn = SeqScaledDotProductAttention(dropout)
        else:
            if ra_type == 'alibi':
                self.attn_fn = SeqDotProductAttentionALiBi(dropout, num_heads=num_heads)
            elif ra_type == 't5':
                # TODO: pass through options
                self.attn_fn = SeqDotProductAttentionT5(dropout, num_heads=num_heads)
            else:
                self.attn_fn = SeqDotProductAttention(dropout)
        self.attn = None  # last computed attention weights, kept for inspection

    def forward(self, qkvm: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """Low-order projections of query, key and value into multiple heads, then attention application and dropout

        :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D
        :param key: a set of keys from encoder or self
        :param value: a set of values from encoder or self
        :param mask: masking (for destination) to prevent seeing what we shouldnt
        :return: Multi-head attention output, result of attention application to sequence (B, T, d_model)
        """
        query, key, value, mask = qkvm
        batchsz = query.size(0)

        # (B, H, T, D)
        query = self.w_Q(query).view(batchsz, -1, self.h, self.d_k).transpose(1, 2)
        key = self.w_K(key).view(batchsz, -1, self.h, self.d_k).transpose(1, 2)
        value = self.w_V(value).view(batchsz, -1, self.h, self.d_value).transpose(1, 2)

        x = self.attn_fn((query, key, value, mask))
        self.attn = self.attn_fn.attn

        # merge heads back: (B, T, H * d_value)
        x = x.transpose(1, 2).contiguous().view(batchsz, -1, self.h * self.d_value)
        if self.h > 1:
            return self.w_O(x)
        else:
            return x


class MultiHeadedRelativeAttention(nn.Module):
    """
    Multi-headed relative attention from Shaw et al 2018 (https://www.aclweb.org/anthology/N18-2074.pdf)

    This method follows the same approach of MultiHeadedAttention, but it computes Relative Position Representations
    (RPR) which are used as part of the attention computations.  To facilitate this, the model has its own internal
    embeddings lookup table, and it has an updated computation for both the attention weights and the application of
    those weights to follow them.
""" def __init__( self, num_heads: int, d_model: int, rpr_k: int, dropout: float = 0.1, scale: bool = False, d_k: Optional[int] = None, windowed_ra: bool = False, rpr_value_on: bool = True ): """Constructor for multi-headed attention :param num_heads: The number of heads :param d_model: The model hidden size :param rpr_k: distance within which relative positional embedding will be considered :param windowed_ra: whether prevent attention beyond rpr_k :param dropout (``float``): The amount of dropout to use :param scale: Should we scale the dot product attention :param d_k: The low-order project per head. This is normally `d_model // num_heads` unless set explicitly """ super().__init__() if d_k is None: self.d_k = d_model // num_heads if d_model % num_heads != 0: raise Exception(f"d_model ({d_model}) must be evenly divisible by num_heads ({num_heads})") else: self.d_k = d_k self.h = num_heads # for multi-headed attention, w_V projects to h heads, each head has dim d_k; for single headed attention, w_V # project to 1 head with dim d_model if self.h > 1: self.d_value = self.d_k else: self.d_value = d_model self.rpr_k = rpr_k self.rpr_value_on = rpr_value_on self.rpr_key = nn.Embedding(2 * rpr_k + 1, self.d_k) if self.rpr_value_on: self.rpr_value = nn.Embedding(2 * rpr_k + 1, self.d_value) self.windowed_ra = windowed_ra self.w_Q = Dense(d_model, self.d_k * self.h) self.w_K = Dense(d_model, self.d_k * self.h) self.w_V = Dense(d_model, self.d_value * self.h) if self.h > 1: # w_O is not needed for sinlge headed attention self.w_O = Dense(self.d_k * self.h, d_model) if scale: if windowed_ra: self.attn_fn = SeqScaledWindowedRelativeAttention(dropout) else: self.attn_fn = SeqScaledDotProductRelativeAttention(dropout) else: self.attn_fn = SeqDotProductRelativeAttention(dropout) self.attn = None def make_rpr(self, q_len, k_len, device) -> Tuple[torch.Tensor, torch.Tensor]: """Create a matrix shifted by self.rpr_k and bounded between 0 and 2*self.rpr_k to provide 0-based 
indexing for embedding """ q_seq = torch.arange(q_len).to(device) k_seq = torch.arange(k_len).to(device) window_len = 2 * self.rpr_k edges = k_seq.view(1, -1) - q_seq.view(-1, 1) + self.rpr_k # [q_len, k_len] edges = torch.clamp(edges, 0, window_len) if self.rpr_value_on: return self.rpr_key(edges), self.rpr_value(edges) # [q_len, k_len, d_k] else: return self.rpr_key(edges), None def make_windowed_rpr(self, device): window_len = 2 * self.rpr_k + 1 window = torch.arange(window_len).to(device) if self.rpr_value_on: return self.rpr_key(window), self.rpr_value(window) else: return self.rpr_key(window), None def forward(self, qkvm: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor: """Low-order projections of query, key and value into multiple heads, then attention application and dropout :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param value: a set of values from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: Multi-head attention output, result of attention application to sequence (B, T, d_model) """ query, key, value, mask = qkvm batchsz = query.size(0) query_len = query.size(1) key_len = key.size(1) # key and value have the same length, but query can have a different length # (B, H, T, D) query = self.w_Q(query).view(batchsz, -1, self.h, self.d_k).transpose(1, 2) key = self.w_K(key).view(batchsz, -1, self.h, self.d_k).transpose(1, 2) value = self.w_V(value).view(batchsz, -1, self.h, self.d_value).transpose(1, 2) if self.windowed_ra: rpr_key, rpr_value = self.make_windowed_rpr(query.device) else: rpr_key, rpr_value = self.make_rpr(query_len, key_len, query.device) x = self.attn_fn((query, key, value, rpr_key, rpr_value, mask)) self.attn = self.attn_fn.attn x = x.transpose(1, 2).contiguous().view(batchsz, -1, self.h * self.d_value) if self.h > 1: return self.w_O(x) else: return x 
class TransformerEncoderBase(nn.Module): def __init__( self, num_heads: int, d_model: int, pdrop: float, scale: bool = True, activation_type: str = "gelu", d_ff: Optional[int] = None, d_k: Optional[int] = None, rpr_k: Optional[int] = None, ffn_pdrop: Optional[float] = 0.0, layer_norm_eps: float = 1.0e-6, windowed_ra: Optional[bool] = False, rpr_value_on: bool = True, ra_type: Optional[str] = None, **kwargs, ): super().__init__() self.d_model = d_model self.d_ff = d_ff if d_ff is not None else 4 * d_model if rpr_k is not None and rpr_k != 0: self.self_attn = MultiHeadedRelativeAttention(num_heads, d_model, rpr_k, pdrop, scale, d_k=d_k, windowed_ra=windowed_ra, rpr_value_on=rpr_value_on) else: self.self_attn = MultiHeadedAttention(num_heads, d_model, pdrop, scale=scale, d_k=d_k, ra_type=ra_type) self.ffn = nn.Sequential( Dense(self.d_model, self.d_ff), get_activation(activation_type), nn.Dropout(ffn_pdrop), Dense(self.d_ff, self.d_model), ) self.ln1 = nn.LayerNorm(self.d_model, eps=layer_norm_eps) self.ln2 = nn.LayerNorm(self.d_model, eps=layer_norm_eps) self.dropout = nn.Dropout(pdrop) class PreLNTransformerEncoder(TransformerEncoderBase): def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: """ :param inputs: `(x, mask)` :return: The output tensor """ x, mask = inputs h = self.ln1(x) x = x + self.dropout(self.self_attn((h, h, h, mask))) x = x + self.dropout(self.ffn(self.ln2(x))) return x class PreLNBeforeResConnTransformerEncoder(TransformerEncoderBase): def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: """ :param inputs: `(x, mask)` :return: The output tensor """ x, mask = inputs x = self.ln1(x) h = self.self_attn((x, x, x, mask)) x = x + self.dropout(h) x = self.ln2(x) x = x + self.dropout(self.ffn(x)) return x class PostLNTransformerEncoder(TransformerEncoderBase): def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: """ :param inputs: `(x, mask)` :return: The output tensor """ x, 
mask = inputs h = self.self_attn((x, x, x, mask)) x = x + self.dropout(h) x = self.ln2(x) x = x + self.dropout(self.ffn(x)) x = self.ln1(x) return x class SpatialGatingUnit(nn.Module): """Spatial gating unit There are 2 ways we can look at this unit, as an MLP or a Conv with kernel length 1 l = nn.Linear(T, T) c = nn.Conv1d(T, T, 1) l(x.transpose(1, 2)).transpose(1, 2) c(x) """ def __init__(self, d_ffn: int, nctx: int, layer_norm_eps: float = 1.0e-6): super().__init__() self.norm = nn.LayerNorm(d_ffn // 2, eps=layer_norm_eps) self.proj = pytorch_conv1d(nctx, nctx, 1) nn.init.constant_(self.proj.bias, 1.0) def split(self, x): u, v = x.chunk(2, dim=-1) return u, v def forward(self, x): u, v = self.split(x) v = self.norm(v) v = self.proj(v) return u * v class GatedMLPEncoder(nn.Module): """Following https://arxiv.org/pdf/2105.08050.pdf """ def __init__( self, d_model: int, pdrop: float, nctx: int = 256, activation_type: str = "gelu", d_ff: Optional[int] = None, ffn_pdrop: Optional[float] = 0.0, layer_norm_eps: float = 1.0e-6 ): super().__init__() self.d_model = d_model self.d_ff = d_ff if d_ff is not None else 4 * d_model self.to_ffn = Dense(self.d_model, self.d_ff) self.activation = get_activation(activation_type) self.ffn_drop = nn.Dropout(ffn_pdrop) self.from_sgu = Dense(self.d_ff//2, self.d_model) self.norm = nn.LayerNorm(self.d_model, eps=layer_norm_eps) self.dropout = nn.Dropout(pdrop) self.spatial_gating_unit = SpatialGatingUnit(self.d_ff, nctx, layer_norm_eps) def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: """Do gMLP forward TODO: we arent using the mask ATM :param inputs: `(x, mask)` :return: The output tensor """ # The shortcut here happens pretty early shortcut, mask = inputs # A "channel" norm x = self.norm(shortcut) # A "channel" FFN x = self.dropout(self.to_ffn(x)) # gelu according to https://arxiv.org/pdf/2105.08050.pdf x = self.activation(x) # "spatial" projection (over T) x = self.spatial_gating_unit(x) # "channel" 
projection x = self.from_sgu(x) x = self.dropout(x) return x + shortcut class TransformerDecoderBase(nn.Module): def __init__( self, num_heads: int, d_model: int, pdrop: float, scale: bool = True, activation_type: str = "gelu", d_ff: Optional[int] = None, d_k: Optional[int] = None, rpr_k: Optional[int] = None, ffn_pdrop: Optional[float] = 0.0, layer_norm_eps: float = 1.0e-6, rpr_value_on: bool = True, ra_type: Optional[str] = None, ): super().__init__() self.d_model = d_model self.d_ff = d_ff if d_ff is not None else 4 * d_model if rpr_k is not None: self.self_attn = MultiHeadedRelativeAttention(num_heads, d_model, rpr_k, pdrop, scale, d_k=d_k, rpr_value_on=rpr_value_on) self.src_attn = MultiHeadedRelativeAttention(num_heads, d_model, rpr_k, pdrop, scale, d_k=d_k, rpr_value_on=rpr_value_on) else: self.self_attn = MultiHeadedAttention(num_heads, d_model, pdrop, scale, d_k=d_k, ra_type=ra_type) self.src_attn = MultiHeadedAttention(num_heads, d_model, pdrop, scale, d_k=d_k, ra_type=ra_type) self.ffn = nn.Sequential( Dense(self.d_model, self.d_ff), nn.Dropout(ffn_pdrop), get_activation(activation_type), Dense(self.d_ff, self.d_model), ) self.ln1 = nn.LayerNorm(self.d_model, eps=layer_norm_eps) self.ln2 = nn.LayerNorm(self.d_model, eps=layer_norm_eps) self.ln3 = nn.LayerNorm(self.d_model, eps=layer_norm_eps) self.dropout = nn.Dropout(pdrop) class PreLNTransformerDecoder(TransformerDecoderBase): def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor: x, memory, src_mask, tgt_mask = inputs h = self.ln1(x) x = x + self.dropout(self.self_attn((h, h, h, tgt_mask))) h = self.ln2(x) x = x + self.dropout(self.src_attn((h, memory, memory, src_mask))) h = self.ln3(x) x = x + self.dropout(self.ffn(h)) return x class PreLNBeforeResConnTransformerDecoder(TransformerDecoderBase): def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor: x, memory, src_mask, tgt_mask = inputs x = 
self.ln1(x) x = x + self.dropout(self.self_attn((x, x, x, tgt_mask))) x = self.ln2(x) x = x + self.dropout(self.src_attn((x, memory, memory, src_mask))) x = self.ln3(x) x = x + self.dropout(self.ffn(x)) return x class PostLNTransformerDecoder(nn.Module): def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor: x, memory, src_mask, tgt_mask = inputs x = x + self.dropout(self.self_attn((x, x, x, tgt_mask))) x = self.ln2(x) x = x + self.dropout(self.src_attn((x, memory, memory, src_mask))) x = self.ln3(x) x = x + self.dropout(self.ffn(x)) x = self.ln1(x) return x class TransformerEncoderStack(nn.Module): def __init__( self, num_heads: int, d_model: int, pdrop: float, scale: bool = True, layers: int = 1, activation: str = "relu", d_ff: Optional[int] = None, d_k: Optional[int] = None, rpr_k: Optional[Union[int, List[int]]] = None, ffn_pdrop: Optional[float] = 0.0, layer_norms_after: bool = False, layer_norm_eps: float = 1.0e-6, windowed_ra: Optional[bool] = False, rpr_value_on: bool = True, layer_drop: float = 0.0, ra_type: Optional[str] = None, transformer_type: Optional[str] = False, **kwargs, ): super().__init__() self.encoders = nn.ModuleList() if layer_norms_after or transformer_type == "post-layer-norm": logger.info("Using post-layer-norm transformer (encoder)") TransformerEncoder = PostLNTransformerEncoder self.ln = nn.Identity() elif transformer_type == "pre-layer-norm": TransformerEncoder = PreLNTransformerEncoder self.ln = nn.LayerNorm(d_model, eps=layer_norm_eps) else: # transformer_type == "pre-layer-norm-before-resconn" logger.info("Using layer norm before residual connections (encoder)") if layer_norms_after: raise Exception(f"Mutually exclusive options ({transformer_type}) and layer_norms_after=True)",) TransformerEncoder = PreLNBeforeResConnTransformerEncoder self.ln = nn.LayerNorm(d_model, eps=layer_norm_eps) self.output_dim = d_model self.layer_drop = layer_drop if not is_sequence(rpr_k): rpr_k = [rpr_k] 
* layers elif len(rpr_k) == 1: rpr_k = [rpr_k[0]] * layers for i in range(layers): self.encoders.append( TransformerEncoder( num_heads, d_model, pdrop, scale, activation, d_ff, d_k, rpr_k=rpr_k[i], ffn_pdrop=ffn_pdrop, layer_norm_eps=layer_norm_eps, windowed_ra=windowed_ra, rpr_value_on=rpr_value_on, ra_type=ra_type ) ) def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: x, mask = inputs for layer in self.encoders: pdrop = np.random.random() if not self.training or (pdrop >= self.layer_drop): x = layer((x, mask)) return self.ln(x) class GatedMLPEncoderStack(nn.Module): """Following https://arxiv.org/pdf/2105.08050.pdf """ def __init__( self, d_model: int, pdrop: float, layers: int = 1, nctx: int = 256, activation: str = "gelu", d_ff: Optional[int] = None, ffn_pdrop: Optional[float] = 0.0, layer_norm_eps: float = 1.0e-6, layer_drop: float = 0.0, **kwargs, ): super().__init__() self.encoders = nn.ModuleList() self.ln = nn.LayerNorm(d_model, eps=layer_norm_eps) self.output_dim = d_model self.layer_drop = layer_drop for i in range(layers): self.encoders.append( GatedMLPEncoder( d_model, pdrop, nctx, activation, d_ff, ffn_pdrop=ffn_pdrop, layer_norm_eps=layer_norm_eps, ) ) def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: x, mask = inputs for layer in self.encoders: pdrop = np.random.random() if not self.training or (pdrop >= self.layer_drop): x = layer((x, mask)) return self.ln(x) class TransformerEncoderStackWithLengths(TransformerEncoderStack): def __init__( self, num_heads: int, d_model: int, pdrop: bool, scale: bool = True, layers: int = 1, activation: str = "relu", d_ff: Optional[int] = None, d_k: Optional[int] = None, rpr_k: Optional[Union[int, List[int]]] = None, input_sz: Optional[int] = None, ffn_pdrop: Optional[float] = 0.0, layer_norms_after: bool = False, layer_norm_eps: float = 1.0e-6, windowed_ra: Optional[bool] = False, rpr_value_on: bool = True, layer_drop: float = 0.0, ra_type: Optional[str] = None, 
transformer_type: Optional[str] = None, **kwargs, ): super().__init__(num_heads, d_model, pdrop, scale, layers, activation, d_ff, d_k, rpr_k, ffn_pdrop, layer_norms_after, layer_norm_eps, windowed_ra, rpr_value_on, layer_drop, ra_type, transformer_type, **kwargs) self.proj = WithDropout(pytorch_linear(input_sz, d_model), pdrop) def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: x, lengths = inputs x = self.proj(x) max_seqlen = x.shape[1] mask = sequence_mask(lengths, max_seqlen).to(x.device) return super().forward((x, mask.unsqueeze(1).unsqueeze(1))) class TransformerEncoderStackWithTimeMask(TransformerEncoderStack): def __init__( self, num_heads: int, d_model: int, pdrop: bool, scale: bool = True, layers: int = 1, activation: str = "relu", d_ff: Optional[int] = None, d_k: Optional[int] = None, rpr_k: Optional[Union[int, List[int]]] = None, input_sz: Optional[int] = None, ffn_pdrop: Optional[float] = 0.0, layer_norms_after: bool = False, layer_norm_eps: float = 1.0e-6, windowed_ra: Optional[bool] = False, rpr_value_on: bool = True, layer_drop: float = 0.0, ra_type: Optional[str] = None, transformer_type: Optional[str] = None, **kwargs, ): super().__init__(num_heads, d_model, pdrop, scale, layers, activation, d_ff, d_k, rpr_k, ffn_pdrop, layer_norms_after, layer_norm_eps, windowed_ra, rpr_value_on, layer_drop, ra_type, transformer_type, **kwargs) self.proj = WithDropout(pytorch_linear(input_sz, d_model), pdrop) def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: x, lengths = inputs x = self.proj(x) max_seqlen = x.shape[1] mask = subsequent_mask(max_seqlen).to(x.device) return super().forward((x, mask.unsqueeze(1).unsqueeze(1))) class TransformerDecoderStack(nn.Module): def __init__( self, num_heads: int, d_model: int, pdrop: float, scale: bool = True, layers: int = 1, activation_type: str = "relu", d_ff: Optional[int] = None, d_k: Optional[int] = None, rpr_k: Optional[Union[int, List[int]]] = None, ffn_pdrop: 
Optional[float] = 0.0, layer_norms_after: bool = False, layer_norm_eps: float = 1.0e-6, layer_drop: float = 0.0, rpr_value_on: bool = True, ra_type: Optional[str] = None, transformer_type: Optional[str] = None, **kwargs, ): super().__init__() self.decoders = nn.ModuleList() self.layer_drop = layer_drop if layer_norms_after or transformer_type == "post-layer-norm": logger.info("Using post-layer-norm transformer (decoder)") TransformerDecoder = PostLNTransformerDecoder self.ln = nn.Identity() elif transformer_type == "pre-layer-norm": TransformerDecoder = PreLNTransformerDecoder self.ln = nn.LayerNorm(d_model, eps=layer_norm_eps) else: # transformer_type == "pre-layer-norm-before-resconn" logger.info("Using layer norm before residual connections (decoder)") if layer_norms_after: raise Exception(f"Mutually exclusive options ({transformer_type}) and layer_norms_after=True)",) TransformerDecoder = PreLNBeforeResConnTransformerDecoder self.ln = nn.LayerNorm(d_model, eps=layer_norm_eps) if not is_sequence(rpr_k): rpr_k = [rpr_k] * layers elif len(rpr_k) == 1: rpr_k = [rpr_k[0]] * layers for i in range(layers): self.decoders.append( TransformerDecoder(num_heads, d_model, pdrop, scale, activation_type, d_ff, d_k=d_k, rpr_k=rpr_k[i], ffn_pdrop=ffn_pdrop, layer_norm_eps=layer_norm_eps, rpr_value_on=rpr_value_on, ra_type=ra_type) ) def forward(self, inputs): x, memory, src_mask, tgt_mask = inputs for layer in self.decoders: pdrop = np.random.random() if not self.training or (pdrop >= self.layer_drop): x = layer((x, memory, src_mask, tgt_mask)) return self.ln(x) def update_lengths(lengths, eoses, idx): """Update the length of a generated tensor based on the first EOS found. This is useful for a decoding situation where tokens after an EOS can be something other than EOS. This also makes sure that a second generated EOS doesn't affect the lengths. :param lengths: `torch.LongTensor`: The lengths where zero means an unfinished sequence. 
:param eoses:  `torch.ByteTensor`: A mask that has 1 for sequences that generated an EOS.
    :param idx: `int`: What value to fill the finished lengths with (normally the current decoding timestep).

    :returns: `torch.Tensor`: The updated lengths tensor (same shape and type).
    """
    # If a length is 0 it has never had a length set so it is eligible to have
    # this EOS be the length.
    updatable_lengths = lengths == 0
    # If this length can be updated AND this token is an eos
    lengths_mask = updatable_lengths & eoses
    return lengths.masked_fill(lengths_mask, idx)


def gnmt_length_penalty(lengths, alpha=0.8):
    """Calculate a length penalty from https://arxiv.org/pdf/1609.08144.pdf

    The paper states the penalty as (5 + |Y|)^a / (5 + 1)^a.
    This is implemented as ((5 + |Y|) / 6)^a for a (very) tiny performance boost

    :param lengths: `torch.LongTensor`: [B, K] The lengths of the beams.
    :param alpha: `float`: A hyperparameter. See Table 2 for a search on this parameter.

    :returns: `torch.FloatTensor`: [B, K, 1] The penalties.
    """
    lengths = lengths.to(torch.float)
    penalty = torch.pow(((5 + lengths) / 6), alpha)
    # unsqueeze so the penalty broadcasts over the vocabulary dim of the scores
    return penalty.unsqueeze(-1)


def no_length_penalty(lengths):
    """A dummy function that returns a no penalty (1)."""
    return torch.ones_like(lengths).to(torch.float).unsqueeze(-1)


def repeat_batch(t, K, dim=0):
    """Repeat a tensor while keeping the concept of a batch.

    :param t: `torch.Tensor`: The tensor to repeat.
    :param K: `int`: The number of times to repeat the tensor.
    :param dim: `int`: The dimension to repeat in. This should be the batch dimension.

    :returns: `torch.Tensor`: The repeated tensor. The new shape will be
        batch size * K at dim, the rest of the shapes will be the same.
Example:: >>> a = torch.arange(10).view(2, -1) >>> a tensor([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) >>> a.repeat(2, 1) tensor([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) >>> repeat_batch(a, 2) tensor([[0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [5, 6, 7, 8, 9]]) """ shape = t.shape tiling = [1] * (len(shape) + 1) tiling[dim + 1] = K tiled = t.unsqueeze(dim + 1).repeat(tiling) old_bsz = shape[dim] new_bsz = old_bsz * K new_shape = list(shape[:dim]) + [new_bsz] + list(shape[dim + 1 :]) return tiled.view(new_shape) class BeamSearchBase: def __init__(self, beam=1, length_penalty=None, **kwargs): self.length_penalty = length_penalty if length_penalty else no_length_penalty self.K = beam def init(self, encoder_outputs): pass def step(self, paths, extra): pass def update(self, beams, extra): pass def __call__(self, encoder_outputs, **kwargs): """Perform batched Beam Search. Note: The paths and lengths generated do not include the <GO> token. :param encoder_outputs: `namedtuple` The outputs of the encoder class. :param init: `Callable(ecnoder_outputs: encoder_outputs, K: int)` -> Any: A callable that is called once at the start of the search to initialize things. This returns a blob that is passed to other callables. :param step: `Callable(paths: torch.LongTensor, extra) -> (probs: torch.FloatTensor, extra): A callable that is does a single decoding step. It returns the log probabilities over the vocabulary in the last dimension. It also returns any state the decoding process needs. :param update: `Callable(beams: torch.LongTensor, extra) -> extra: A callable that is called to edit the decoding state based on the selected best beams. :param length_penalty: `Callable(lengths: torch.LongTensor) -> torch.floatTensor A callable that generates a penalty based on the lengths. Lengths is [B, K] and the returned penalty should be [B, K, 1] (or [B, K, V] to have token based penalties?) :Keyword Arguments: * *beam* -- `int`: The number of beams to use. 
* *mxlen* -- `int`: The max number of steps to run the search for. :returns: tuple(preds: torch.LongTensor, lengths: torch.LongTensor, scores: torch.FloatTensor) preds: The predicted values: [B, K, max(lengths)] lengths: The length of each prediction [B, K] scores: The score of each path [B, K] """ mxlen = kwargs.get("mxlen", 100) bsz = encoder_outputs.output.shape[0] device = encoder_outputs.output.device with torch.no_grad(): extra = self.init(encoder_outputs) paths = torch.full((bsz, self.K, 1), Offsets.GO, dtype=torch.long, device=device) # This tracks the log prob of each beam. This is distinct from score which # is based on the log prob and penalties. log_probs = torch.zeros((bsz, self.K), dtype=torch.float, device=device) # Tracks the lengths of the beams, unfinished beams have lengths of zero. lengths = torch.zeros((bsz, self.K), dtype=torch.long, device=device) for i in range(mxlen - 1): probs, extra = self.step(paths, extra) V = probs.shape[-1] probs = probs.view((bsz, self.K, V)) # [B, K, V] if i > 0: # This mask is for all beams that are done. done_mask = (lengths != 0).unsqueeze(-1) # [B, K, 1] # Can creating this mask be moved out of the loop? It never changes but we don't have V # This mask selects the EOS token eos_mask = torch.zeros((1, 1, V), dtype=done_mask.dtype, device=device) eos_mask[:, :, Offsets.EOS] = 1 # This mask selects the EOS token of only the beams that are done. mask = done_mask & eos_mask # Put all probability mass on the EOS token for finished beams. # Otherwise as the other beams get longer they will all give # up and eventually select this beam and all outputs become # the same. probs = probs.masked_fill(done_mask, -np.inf) probs = probs.masked_fill(mask, 0) probs = log_probs.unsqueeze(-1) + probs # [B, K, V] # Calculate the score of the beam based on the current length. path_scores = probs / self.length_penalty(lengths.masked_fill(lengths == 0, i + 1)) else: # On the first step we only look at probabilities for the first beam. 
# If we don't then the probs will be the same for each beam # This means the same token will be selected for each beam # And we won't get any diversity. # Using only the first beam ensures K different starting points. path_scores = probs[:, 0, :] flat_scores = path_scores.view(bsz, -1) # [B, K * V] best_scores, best_idx = flat_scores.topk(self.K, 1) # Get the log_probs of the best scoring beams log_probs = probs.view(bsz, -1).gather(1, best_idx).view(bsz, self.K) best_beams = best_idx // V # Get which beam it came from best_idx = best_idx % V # Get the index of the word regardless of which beam it is. # Best Beam index is relative within the batch (only [0, K)). # This makes the index global (e.g. best beams for the second # batch example is in [K, 2*K)). offsets = torch.arange(bsz, dtype=torch.long, device=device) * self.K offset_beams = best_beams + offsets.unsqueeze(-1) flat_beams = offset_beams.view(bsz * self.K) # Select the paths to extend based on the best beams flat_paths = paths.view(bsz * self.K, -1) new_paths = flat_paths[flat_beams, :].view(bsz, self.K, -1) # Add the selected outputs to the paths paths = torch.cat([new_paths, best_idx.unsqueeze(-1)], dim=2) # Select the lengths to keep tracking based on the valid beams left. lengths = lengths.view(-1)[flat_beams].view((bsz, self.K)) extra = self.update(flat_beams, extra) # Updated lengths based on if we hit EOS last = paths[:, :, -1] eoses = last == Offsets.EOS lengths = update_lengths(lengths, eoses, i + 1) if (lengths != 0).all(): break else: # This runs if the loop didn't break meaning one beam hit the max len # Add an EOS to anything that hasn't hit the end. This makes the scores real. 
probs, extra = self.step(paths, extra) V = probs.size(-1) probs = probs.view((bsz, self.K, V)) probs = probs[:, :, Offsets.EOS] # Select the score of EOS # If any of the beams are done mask out the score of this EOS (they already had an EOS) probs = probs.masked_fill((lengths != 0), 0) log_probs = log_probs + probs end_tokens = torch.full((bsz, self.K, 1), Offsets.EOS, device=device, dtype=paths.dtype) paths = torch.cat([paths, end_tokens], dim=2) lengths = update_lengths(lengths, torch.ones_like(lengths) == 1, mxlen) lengths = update_lengths(lengths, torch.ones_like(lengths) == 1, mxlen) best_scores = log_probs / self.length_penalty(lengths).squeeze(-1) # Slice off the Offsets.GO token paths = paths[:, :, 1:] return paths, lengths, best_scores def checkpoint_for(model_base, epoch, tick_type='epoch'): return '{}-{}-{}'.format(model_base, tick_type, epoch+1) def rm_old_checkpoints(base_path, current_epoch, last_n=10): for i in range(0, current_epoch-last_n): checkpoint_i = checkpoint_for(base_path, i) for extension in ('.pth', '.npz'): checkpoint_name = checkpoint_i + extension if os.path.exists(checkpoint_name): os.remove(checkpoint_name) def find_latest_checkpoint(checkpoint_dir: str, wildcard="checkpoint") -> Tuple[str, int]: step_num = 0 for f in glob.glob(os.path.join(checkpoint_dir, f"{wildcard}*")): base = os.path.basename(f) if "-" not in base: continue last = base.split("-")[-1] for x in ('.pth', '.npz'): last = last.replace(x, '', -1) this_step_num = int(last) if this_step_num > step_num: checkpoint = f step_num = this_step_num return checkpoint, step_num def save_checkpoint(model: torch.nn.Module, model_base: str, count: int, tick_type: str = 'epoch', save_npz: bool = False): from eight_mile.pytorch.serialize import save_tlm_npz, save_tlm_output_npz, save_transformer_seq2seq_npz, save_transformer_de_npz checkpoint_name = checkpoint_for(model_base, count, tick_type=tick_type) # Its possible due to how its called that we might save the same checkpoint twice 
if we dont check first
    if os.path.exists(checkpoint_name):
        logger.info("Checkpoint already exists: %s", checkpoint_name)
        return
    logger.info("Creating checkpoint: %s", checkpoint_name)
    # unwrap DataParallel/DistributedDataParallel so state-dict keys are not prefixed with `module.`
    model_ = model.module if hasattr(model, 'module') else model

    torch.save(model_.state_dict(), checkpoint_name+'.pth')
    if save_npz:
        # dispatch on model attributes to pick the matching npz serializer
        if hasattr(model_, 'decoder'):
            save_transformer_seq2seq_npz(model_, checkpoint_name+'.npz')
        elif hasattr(model_, 'reduction_layer'):
            save_transformer_de_npz(model_, checkpoint_name+'.npz')
        elif hasattr(model_, 'output_layer'):
            save_tlm_output_npz(model_, checkpoint_name+'.npz')
        else:
            save_tlm_npz(model_, checkpoint_name+'.npz')

    if tick_type == 'epoch':
        rm_old_checkpoints(model_base, count)


def init_distributed(local_rank):
    """Initialize the NCCL process group and select the CUDA device for this rank.

    :param local_rank: local rank of this process, or -1 to read it from the `RANK` env variable
    :returns: `(device, local_rank)`
    """
    if local_rank == -1:
        # https://github.com/kubeflow/pytorch-operator/issues/128
        # https://github.com/pytorch/examples/blob/master/imagenet/main.py
        logger.info("Setting local rank to RANK env variable")
        local_rank = int(os.environ['RANK'])
    logger.warning("Local rank (%d)", local_rank)
    # In an env like k8s with kubeflow each worker will only see a single gpu
    # with an id of 0. If the gpu count is 1 then we are probably in an env like
    # that so we should just use the first (and only) gpu avaiable
    if torch.cuda.device_count() == 1:
        torch.cuda.set_device(0)
        device = torch.device("cuda", 0)
    # This program assumes multiprocess/multi-device on a single node. Each
    # process gets a rank (via cli or ENV variable) and uses that rank to select
    # which gpu to use. This only makes sense on a single node, if you had 4
    # processes on 2 nodes where each node has 2 GPUs then the ranks would be
    # 0, 1, 2, 3 but the gpus numbers would be node 0: 0, 1 and node 1: 0, 1
    # and this assignment to gpu 3 would fail.
On a single node with 4 processes # and 4 gpus the rank and gpu ids will align and this will work else: torch.cuda.set_device(local_rank) device = torch.device("cuda", local_rank) torch.distributed.init_process_group(backend='nccl', init_method='env://') return device, local_rank class AttentionReduction(nn.Module): """ This is a reduction that is given Q, K, V and a mask vector. Different from base reductions, which get an embedding stack """ def __init__(self): super().__init__() def forward(self, qkvm: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor: """Inputs are the same as for a normal attention function, but the output here is a single tensor, ``[B, H]`` :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param value: a set of values from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: sentence-level encoding with dim [B, d_model] """ class SingleHeadReduction(AttentionReduction): """ Implementation of the "self_attention_head" layer from the conveRT paper (https://arxiv.org/pdf/1911.03688.pdf) """ def __init__( self, d_model: int, dropout: float = 0.0, scale: bool = False, d_k: Optional[int] = None, pooling: str = 'sqrt_length', ): """ :param d_model: The model hidden size :param dropout (``float``): The amount of dropout to use :param scale: should we scale the dot product attention :param d_k: The low-order project per head. 
This is normally `d_model // num_heads` unless set explicitly """ super().__init__() self.output_dim = d_model if d_k is None: self.d_k = d_model else: self.d_k = d_k self.w_Q = Dense(d_model, self.d_k) self.w_K = Dense(d_model, self.d_k) if scale: self.attn_fn = SeqScaledDotProductAttention(dropout) else: self.attn_fn = SeqDotProductAttention(dropout) self.attn = None pooling = pooling.lower() self.fill = 0 if pooling == 'max': self.pool = self._max_pool self.fill = -1e9 elif pooling == 'mean': self.pool = self._mean_pool else: self.pool = self._sqrt_length_pool def _sqrt_length_pool(self, x, seq_lengths): x = x.sum(dim=1) # [B, D] x = x * seq_lengths.float().sqrt().unsqueeze(-1) return x def _mean_pool(self, x, seq_lengths): return torch.sum(x, 1, keepdim=False) / torch.unsqueeze(seq_lengths, -1).to(x.dtype).to( x.device ) def _max_pool(self, x, _): x, _ = torch.max(x, 1, keepdim=False) return x def forward(self, qkvm: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor: """According to conveRT model's graph, they project token encodings to lower-dimensional query and key in single head, use them to calculate the attention score matrix that has dim [B, T, T], then sum over the query dim to get a tensor with [B, 1, T] (meaning the amount of attentions each token gets from all other tokens), scale it by sqrt of sequence lengths, then use it as the weight to weighted sum the token encoding to get the sentence encoding. we implement it in an equivalent way that can best make use of the eight_mile codes: do the matrix multiply with value first, then sum over the query dimension. :param query: a query for alignment. 
Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param value: a set of values from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: sentence-level encoding with dim [B, d_model] """ query, key, value, mask = qkvm batchsz = query.size(0) seq_mask = mask.squeeze(1).squeeze(1) # [B, T] seq_lengths = seq_mask.sum(dim=1) # (B, H, T, D), still have num_heads = 1 to use the attention function defined in eight_miles query = self.w_Q(query).view(batchsz, -1, 1, self.d_k).transpose(1, 2) key = self.w_K(key).view(batchsz, -1, 1, self.d_k).transpose(1, 2) value = value.view(batchsz, -1, 1, self.output_dim).transpose(1, 2) x = self.attn_fn((query, key, value, mask)) # [B, 1, T, D] self.attn = self.attn_fn.attn x = x.squeeze(1) # [B, T, D] x = x.masked_fill(seq_mask.unsqueeze(-1) == MASK_FALSE, self.fill) return self.pool(x, seq_lengths) class TransformerDiscriminator(nn.Module): """A Transformer model that tries to predict if each token is real or fake This model is based on [ELECTRA: Pre-Training Text Encoders as Discriminators Rather Than Generators, Clark et al. 2019](https://openreview.net/pdf?id=r1xMH1BtvB). 
""" def __init__( self, embeddings, num_heads: int, d_model: int, dropout: bool, layers: int = 1, activation: str = "relu", d_ff: Optional[int] = None, d_k: Optional[int] = None, rpr_k: Optional[Union[int, List[int]]] = None, layer_norms_after: bool = False, layer_norm_eps: float = 1.0e-6, embeddings_reduction: str = 'sum', **kwargs, ): super().__init__() self.embeddings = EmbeddingsStack(embeddings, dropout, reduction=embeddings_reduction) self.weight_std = kwargs.get('weight_std', 0.02) assert self.embeddings.dsz == d_model self.transformer = TransformerEncoderStack( num_heads, d_model=d_model, pdrop=dropout, scale=True, layers=layers, activation=activation, d_ff=d_ff, rpr_k=rpr_k, d_k=d_k, layer_norms_after=layer_norms_after, layer_norm_eps=layer_norm_eps ) self.proj_to_output = pytorch_linear(d_model, 1) self.apply(self.init_layer_weights) self.lengths_feature = kwargs.get('lengths_feature', list(self.embeddings.keys())[0]) def init_layer_weights(self, module): if isinstance(module, (nn.Linear, nn.Embedding, nn.LayerNorm)): module.weight.data.normal_(mean=0.0, std=self.weight_std) if isinstance(module, (nn.Linear, nn.LayerNorm)) and module.bias is not None: module.bias.data.zero_() def forward(self, features): embedded = self.embeddings(features) x = features[self.lengths_feature] input_mask = torch.zeros(x.shape, device=x.device, dtype=torch.long).masked_fill(x != Offsets.PAD, 1).unsqueeze(1).unsqueeze(1) transformer_out = self.transformer((embedded, input_mask)) binary = self.proj_to_output(transformer_out) return torch.sigmoid(binary) def create_loss(self): return nn.BCELoss(reduction="none") class PooledSequenceCriterion(nn.Module): def __init__(self, LossFn=nn.BCEWithLogitsLoss, avg='token'): super().__init__() if avg == 'token': self.crit = LossFn() self._norm = self._no_norm else: self.crit = LossFn() self._norm = self._batch_norm def _batch_norm(self, loss, inputs): return loss / inputs.size()[0] def _no_norm(self, loss, inputs): return loss def 
forward(self, inputs, targets): """Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, C] The scores from the model. Batch First :param targets: torch.LongTensor, The labels. :returns: torch.FloatTensor, The loss. """ #inputs = inputs.transpose(0, 1) C = inputs.shape[-1] flat_targets = torch.nn.functional.one_hot(targets, C) # Get the offsets of the non-zero targets, the values of these are all on flat_targets = (torch.sum(flat_targets, axis=1) != 0).float() flat_targets[:, Offsets.PAD] = 0 flat_targets[:, Offsets.EOS] = 0 flat_targets[:, Offsets.GO] = 0 if len(inputs.shape) > 2: max_per_vocab = inputs.max(0)[0] loss = self.crit(max_per_vocab, flat_targets) else: loss = self.crit(inputs, flat_targets) return self._norm(loss, inputs) class SequenceCriterion(nn.Module): def __init__(self, LossFn=nn.NLLLoss, avg='token'): super().__init__() if avg == 'token': # self.crit = LossFn(ignore_index=Offsets.PAD, reduction='elementwise-mean') self.crit = LossFn(ignore_index=Offsets.PAD, size_average=True) self._norm = self._no_norm else: self.crit = LossFn(ignore_index=Offsets.PAD, size_average=False) self._norm = self._batch_norm def _batch_norm(self, loss, inputs): return loss / inputs.size()[0] def _no_norm(self, loss, inputs): return loss def forward(self, inputs, targets): """Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, .., C] The scores from the model. Batch First :param targets: torch.LongTensor, The labels. :returns: torch.FloatTensor, The loss. 
""" total_sz = targets.nelement() loss = self.crit(inputs.view(total_sz, -1), targets.view(total_sz)) return self._norm(loss, inputs) def pytorch_conv1d(in_channels, out_channels, fsz, unif=0, padding=0, initializer=None, stride=1, bias=True, groups=1): c = nn.Conv1d(in_channels, out_channels, fsz, padding=padding, stride=stride, bias=bias, groups=groups) if unif > 0: c.weight.data.uniform_(-unif, unif) elif initializer == "ortho": nn.init.orthogonal_(c.weight) if bias: nn.init.constant_(c.bias, 0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform_(c.weight) if bias: nn.init.constant_(c.bias, 0) elif initializer == "normal": nn.init.normal(mean=0, std=unif) if bias: nn.init.constant_(c.bias, 0) else: nn.init.xavier_uniform_(c.weight) if bias: nn.init.constant_(c.bias, 0) return c def tie_weight(to_layer, from_layer): """Assigns a weight object to the layer weights. This method exists to duplicate baseline functionality across packages. :param to_layer: the pytorch layer to assign weights to :param from_layer: pytorch layer to retrieve weights from """ to_layer.weight = from_layer.weight class BilinearAttention(nn.Module): def __init__(self, in_hsz: int, out_hsz: int = 1, bias_x: bool = True, bias_y: bool = True): super().__init__() self.in_hsz = in_hsz self.out_hsz = out_hsz self.bias_x = bias_x self.bias_y = bias_y a1 = in_hsz a2 = in_hsz if self.bias_x: a1 += 1 if self.bias_y: a2 += 1 self.weight = nn.Parameter(torch.Tensor(out_hsz, in_hsz + bias_x, in_hsz + bias_y)) self.reset_parameters() def reset_parameters(self): nn.init.zeros_(self.weight) #nn.init.orthogonal_(self.weight) def forward(self, x, y, mask): r""" Args: x: ``[B, T, H]``. y: ``[B, T, H]``. Returns: ~torch.Tensor: A scoring tensor of shape ``[batch_size, n_out, seq_len, seq_len]``. If ``n_out=1``, the dimension for ``n_out`` will be squeezed automatically. 
""" if self.bias_x is True: ones = torch.ones(x.shape[:-1] + (1,), device=x.device) x = torch.cat([x, ones], -1) if self.bias_y is True: ones = torch.ones(x.shape[:-1] + (1,), device=y.device) y = torch.cat([y, ones], -1) x = x.unsqueeze(1) y = y.unsqueeze(1) u = x @ self.weight s = u @ y.transpose(-2, -1) if self.out_hsz == 1: s = s.squeeze(1) s = s.masked_fill((mask.bool() == MASK_FALSE).unsqueeze(1), -1e9) return s class TripletLoss(nn.Module): """Provide a Triplet Loss using the reversed batch for negatives""" def __init__(self, model): super().__init__() self.score = nn.CosineSimilarity(dim=1) self.model = model def forward(self, inputs, targets): # reverse the batch and use as a negative example neg = targets.flip(0) query = self.model.encode_query(inputs) response = self.model.encode_response(targets) neg_response = self.model.encode_response(neg) pos_score = self.score(query, response) neg_score = self.score(query, neg_response) score = neg_score - pos_score score = score.masked_fill(score < 0.0, 0.0).sum(0) return score class ContrastiveLoss(nn.Module): def __init__(self, model, t=1.0, train_temperature=True): super().__init__() self.model = model if t is None: t = 1.0 self.t = nn.Parameter(torch.tensor(t).float(), requires_grad=train_temperature) def forward(self, inputs, targets): query = self.model.encode_query(inputs) # [B, H] response = self.model.encode_response(targets) # [B, H] query = F.normalize(query, p=2, dim=1) response = F.normalize(response, p=2, dim=1) labels = torch.arange(query.shape[0], device=query.device) logits = torch.mm(query, response.T) * self.t.exp() loss = F.cross_entropy(logits, labels) return loss class SymmetricContrastiveLoss(nn.Module): def __init__(self, model, t=1.0, train_temperature=True): super().__init__() self.model = model if t is None: t = 1.0 self.t = nn.Parameter(torch.tensor(t).float(), requires_grad=train_temperature) def forward(self, inputs, targets): query = self.model.encode_query(inputs) # [B, H] response 
= self.model.encode_response(targets) # [B, H] query = F.normalize(query, p=2, dim=1) response = F.normalize(response, p=2, dim=1) labels = torch.arange(query.shape[0], device=query.device) logits = torch.mm(query, response.T) * self.t.exp() loss_1 = F.cross_entropy(logits, labels) loss_2 = F.cross_entropy(logits.T, labels) loss = (loss_1 + loss_2) * 0.5 return loss class AllLoss(nn.Module): def __init__(self, model, warmup_steps=10000, reduction_type='sum'): r"""Loss from here https://arxiv.org/pdf/1705.00652.pdf see section 4 We want to minimize the negative log prob of y given x -log P(y|x) P(y|x) P(x) = P(x, y) Chain Rule of Probability P(y|x) = P(x, y) / P(x) Algebra P(y|x) = P(x, y) / \sum_\hat(y) P(x, y = \hat(y)) Marginalize over all possible ys to get the probability of x P_approx(y|x) = P(x, y) / \sum_i^k P(x, y_k) Approximate the Marginalization by just using the ys in the batch S(x, y) is the score (cosine similarity between x and y in this case) from our neural network P(x, y) = e^S(x, y) P(y|x) = e^S(x, y) / \sum_i^k e^S(x, y_k) log P(y|x) = log( e^S(x, y) / \sum_i^k e^S(x, y_k)) log P(y|x) = S(x, y) - log \sum_i^k e^S(x, y_k) -log P(y|x) = -(S(x, y) - log \sum_i^k e^S(x, y_k)) """ super().__init__() self.score = nn.CosineSimilarity(dim=-1) self.model = model self.max_scale = math.sqrt(self.model.embeddings.output_dim) self.steps = 0 self.warmup_steps = warmup_steps self.reduction = torch.mean if reduction_type == 'mean' else torch.sum def forward(self, inputs, targets): # This is the cosine distance annealing referred to in https://arxiv.org/pdf/1911.03688.pdf fract = min(self.steps / self.warmup_steps, 1) c = (self.max_scale-1) * fract + 1 self.steps += 1 # These will get broadcast to [B, B, H] query = self.model.encode_query(inputs).unsqueeze(1) # [B, 1, H] response = self.model.encode_response(targets).unsqueeze(0) # [1, B, H] # all_scores is now a batch x batch matrix where index (i, j) is the score between # the i^th x vector and the j^th y 
vector all_score = c * self.score(query, response) # [B, B] # The diagonal has the scores of correct pair, (i, i) pos_score = torch.diag(all_score) # vec_log_sum_exp will calculate the batched log_sum_exp in a numerically stable way # the result is a [B, 1] vector which we squeeze to make it [B] to match the diag # Because we are minimizing the negative log we turned the division into a subtraction here loss = pos_score - vec_log_sum_exp(all_score, -1).squeeze() # Batch loss loss = self.reduction(loss) # minimize the negative loss return -loss class CosineSimilarityLoss(nn.Module): def __init__(self, neg_value=0.3, pos_value=0.8): super().__init__() self.pos_value = pos_value self.neg_value = neg_value def forward(self, embeddings_reduction, labels): hsz = int(embeddings_reduction.shape[-1]//2) label_values = torch.zeros_like(labels, dtype=torch.float) label_values[labels == 0] = self.neg_value label_values[labels == 1] = self.pos_value output = torch.cosine_similarity(embeddings_reduction[:,:hsz], embeddings_reduction[:,hsz:]) loss = F.mse_loss(output, label_values.view(-1), reduction='mean') return loss class OnlineContrastiveLoss(nn.Module): def __init__(self): super().__init__() def forward(self, embeddings_reduction, labels): hsz = int(embeddings_reduction.shape[-1]//2) x = embeddings_reduction[:,:hsz] y = embeddings_reduction[:,hsz:] distance_matrix = 1-F.cosine_similarity(x, y) negs = distance_matrix[labels == 0] poss = distance_matrix[labels == 1] # select hard positive and hard negative pairs negative_pairs = negs[negs < (poss.max() if len(poss) > 1 else negs.mean())] positive_pairs = poss[poss > (negs.min() if len(negs) > 1 else poss.mean())] positive_loss = positive_pairs.pow(2).sum() negative_loss = F.relu(0.5 - negative_pairs).pow(2).sum() loss = positive_loss + negative_loss return loss class TwoHeadConcat(AttentionReduction): """Use two parallel SingleHeadReduction, and concatenate the outputs. 
It is used in the conveRT paper (https://arxiv.org/pdf/1911.03688.pdf)""" def __init__(self, d_model, dropout, scale=False, d_k=None, pooling='sqrt_length'): """Two parallel 1-head self-attention, then concatenate the output :param d_model: dim of the self-attention :param dropout: dropout of the self-attention :param scale: scale fo the self-attention :param d_k: d_k of the self-attention :return: concatenation of the two 1-head attention """ super().__init__() self.output_dim = 2*d_model self.reduction1 = SingleHeadReduction(d_model, dropout, scale=scale, d_k=d_k, pooling=pooling) self.reduction2 = SingleHeadReduction(d_model, dropout, scale=scale, d_k=d_k, pooling=pooling) def forward(self, inputs: torch.Tensor): x = inputs encoding1 = self.reduction1(x) encoding2 = self.reduction2(x) x = torch.cat([encoding1, encoding2], dim=-1) return x class ConveRTFFN(nn.Module): """Implementation of the FFN layer from the convert paper (https://arxiv.org/pdf/1911.03688.pdf)""" def __init__(self, insz, hszs, outsz, pdrop): """ :param insz: input dim :param hszs: list of hidden sizes :param outsz: output dim :param pdrop: dropout of each hidden layer """ super().__init__() self.dense_stack = DenseStack(insz, hszs, activation='gelu', pdrop_value=pdrop, skip_connect=True, layer_norm=True) self.final = Dense(hszs[-1], outsz) self.proj = Dense(insz, outsz) if insz != outsz else nn.Identity() self.ln1 = nn.LayerNorm(insz, eps=1e-6) self.ln2 = nn.LayerNorm(outsz, eps=1e-6) def forward(self, inputs): x = self.ln1(inputs) x = self.dense_stack(x) x = self.final(x) x = x + self.proj(inputs) return self.ln2(x) class DualEncoderModel(nn.Module): """Abstract base for dual encoders We can assume that our dual encoder needs to end up in the same output plane between the encoders, and we can define the set of losses here that we are likely to need for most. 
""" def __init__(self, in_sz: int, stacking_layers: Union[int, List[int]] = None, d_out: int = 512, ffn_pdrop=0.1, in_sz_2=None, output_layer=False, output_activation='tanh', output_shared=False): super().__init__() if not in_sz_2: in_sz_2 = in_sz if stacking_layers: stacking_layers = listify(stacking_layers) if stacking_layers: self.ff1 = ConveRTFFN(in_sz, stacking_layers, d_out, ffn_pdrop) self.ff2 = ConveRTFFN(in_sz_2, stacking_layers, d_out, ffn_pdrop) elif output_layer or in_sz != d_out or in_sz != in_sz_2: activation = output_activation if output_layer else None self.ff1 = Dense(in_sz, d_out, activation=activation) if in_sz == in_sz_2 and output_shared: self.ff2 = self.ff1 else: self.ff2 = Dense(in_sz_2, d_out, activation=activation) else: self.ff1 = nn.Identity() self.ff2 = nn.Identity() self.output_dim = d_out def encode_query_base(self, query: torch.Tensor) -> torch.Tensor: pass def encode_response_base(self, response: torch.Tensor) -> torch.Tensor: pass def encode_query(self, query: torch.Tensor) -> torch.Tensor: tensor = self.encode_query_base(query) return self.ff1(tensor) def encode_response(self, response: torch.Tensor) -> torch.Tensor: tensor = self.encode_response_base(response) return self.ff2(tensor) def forward(self, query, response): encoded_query = self.encode_query(query) encoded_response = self.encode_response(response) return encoded_query, encoded_response def create_loss(self, loss_type='symmetric', init_temp=None, learn_temp=False): if loss_type == 'all': return AllLoss(self) elif loss_type == 'all_mean': return AllLoss(self, reduction_type='mean') elif loss_type == 'contrastive': return ContrastiveLoss(self, init_temp, learn_temp) elif loss_type == 'symmetric': return SymmetricContrastiveLoss(self, init_temp, learn_temp) return TripletLoss(self) class BasicDualEncoderModel(DualEncoderModel): """A simple encoder where the encoders are injected and supply the `encode_query_base` and `encode_response_base` """ def __init__(self, encoder_1: 
nn.Module, encoder_2: nn.Module, stacking_layers: Union[int, List[int]] = None, d_out: int = 512, ffn_pdrop=0.1): super().__init__(encoder_1.output_dim, stacking_layers, d_out, ffn_pdrop, in_sz_2=encoder_2.output_dim) self.encoder_1 = encoder_1 self.encoder_2 = encoder_2 def encode_query_base(self, query: torch.Tensor) -> torch.Tensor: return self.encoder_1(query) def encode_response_base(self, response: torch.Tensor) -> torch.Tensor: return self.encoder_2(response) class PairedModel(DualEncoderModel): """Legacy model for transformer-based dual encoder This is a dual-encoder transformer model which shares the lower layer encoder transformer sub-graph The reduction layer is attention based and takes the same input as the transformer layers. It pools the reprs Finally, the feed-forward stacks are applied via subclassing. Note that this model predates the more abstract `AbstractDualEncoder` which could accomplish the same thing by injecting the same `nn.Module` for encoder_1 and encoder_2 consisting of the transformer and reduction """ def __init__(self, embeddings, d_model: int, d_ff: int, dropout: float, num_heads: int, num_layers: int, stacking_layers: Optional[nn.Module] = None, d_out: Optional[int] = None, d_k: Optional[int] = None, weight_std: float = 0.02, rpr_k: Optional[int] = None, reduction_d_k: int = 64, ffn_pdrop: float = 0.1, windowed_ra: bool = False, rpr_value_on: bool = False, reduction_type: str = "2ha", freeze_encoders: bool = False, layer_norms_after: bool = False, embeddings_reduction: str = 'sum', layer_norm_eps: float=1e-6, output_layer: bool = False, output_activation: str = 'tanh', output_shared: bool = False, transformer_type: Optional[str]=None, **kwargs): super().__init__(2*d_model if reduction_type.startswith("2") else d_model, stacking_layers, d_out if d_out is not None else d_model, ffn_pdrop, None, output_layer, output_activation, output_shared) reduction_type = reduction_type.lower() self.reduce_fn = self._reduce_3 if reduction_type == 
"2ha": self.reduction_layer = TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k) elif reduction_type == "2ha_mean": self.reduction_layer = TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="mean") elif reduction_type == "2ha_max": self.reduction_layer = TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="max") elif reduction_type == "sha": self.reduction_layer = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k) elif reduction_type == "sha_mean": self.reduction_layer = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="mean") elif reduction_type == "sha_max": self.reduction_layer = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="max") elif reduction_type == 'max': self.reduce_fn = self._reduce_1 self.reduction_layer = MaxPool1D(self.output_dim) elif reduction_type == 'mean': self.reduce_fn = self._reduce_1 self.reduction_layer = MeanPool1D(self.output_dim) elif reduction_type == 'cls' or reduction_type == 'zero': self.reduce_fn = self._reduce_0 else: raise Exception("Unknown exception type") self.weight_std = weight_std ra_type = kwargs.get('ra_type') self.transformer = TransformerEncoderStack(num_heads=num_heads, d_model=d_model, pdrop=dropout, layers=num_layers, activation='gelu', d_ff=d_ff, ffn_pdrop=ffn_pdrop, d_k=d_k, rpr_k=rpr_k, windowed_ra=windowed_ra, rpr_value_on=rpr_value_on, layer_norms_after=layer_norms_after, layer_norm_eps=layer_norm_eps, ra_type=ra_type, transformer_type=transformer_type) self.embeddings = EmbeddingsStack({'x': embeddings}, 0.0, False, embeddings_reduction) self.freeze = freeze_encoders self.apply(self.init_layer_weights) def init_layer_weights(self, module): if isinstance(module, (nn.Linear, nn.Embedding, nn.LayerNorm)): module.weight.data.normal_(mean=0.0, std=self.weight_std) if isinstance(module, (nn.Linear, nn.LayerNorm)) and module.bias is not None: module.bias.data.zero_() def _reduce_3(self, 
encoded, att_mask): """The attention modules originally created for DE have 3 (redundant) inputs, so use all 3 here """ return self.reduction_layer((encoded, encoded, encoded, att_mask)) def _reduce_1(self, encoded, att_mask): """The standard reduction modules use an input and a length """ lengths = att_mask.squeeze(1).squeeze(1).sum(-1) return self.reduction_layer((encoded, lengths)) def _reduce_0(self, encoded, _): """The [CLS] or <s> reduction on the first token just needs the first timestep """ return encoded[:, 0] def encode_query_base(self, query): query_mask = (query != Offsets.PAD) att_mask = query_mask.unsqueeze(1).unsqueeze(1) with torch.no_grad() if self.freeze else contextlib.ExitStack(): embedded = self.embeddings({'x': query}) encoded_query = self.transformer((embedded, att_mask)) encoded_query = self.reduce_fn(encoded_query, att_mask) return encoded_query def encode_response_base(self, response): response_mask = (response != Offsets.PAD) att_mask = response_mask.unsqueeze(1).unsqueeze(1) with torch.no_grad() if self.freeze else contextlib.ExitStack(): embedded = self.embeddings({'x': response}) encoded_response = self.transformer((embedded, att_mask)) encoded_response = self.reduce_fn(encoded_response, att_mask) return encoded_response class TransformerBoWPairedModel(DualEncoderModel): """2 Encoders (E1, E2). E1 is a Transformer followed by attention reduction. 
E2 is just a pooling of embeddings """ def __init__(self, embeddings, d_model, d_ff, dropout, num_heads, num_layers, stacking_layers=None, d_out=512, d_k=None, weight_std=0.02, rpr_k=None, reduction_d_k=64, ffn_pdrop=0.1, windowed_ra=False, rpr_value_on=False, reduction_type_1="2ha", freeze_encoders=False, layer_norms_after=False, transformer_type: Optional[str]=None, **kwargs): super().__init__(d_model, stacking_layers, d_out, ffn_pdrop) reduction_type_1 = reduction_type_1.lower() if reduction_type_1 == "2ha": self.reduction_layer_1 = nn.Sequential(TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k), nn.Linear(2*d_model, d_model)) elif reduction_type_1 == "2ha_mean": self.reduction_layer_1 = nn.Sequential(TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="mean"), nn.Linear(2 * d_model, d_model)) elif reduction_type_1 == "2ha_max": self.reduction_layer_1 = nn.Sequential(TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="max"), nn.Linear(2 * d_model, d_model)) elif reduction_type_1 == "sha": self.reduction_layer_1 = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k) elif reduction_type_1 == "sha_mean": self.reduction_layer_1 = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="mean") elif reduction_type_1 == "sha_max": self.reduction_layer_1 = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="max") else: raise Exception("Unknown exception type") self.weight_std = weight_std ra_type = kwargs.get('ra_type') self.transformer = TransformerEncoderStack(num_heads=num_heads, d_model=d_model, pdrop=dropout, layers=num_layers, activation='gelu', d_ff=d_ff, ffn_pdrop=ffn_pdrop, d_k=d_k, rpr_k=rpr_k, windowed_ra=windowed_ra, rpr_value_on=rpr_value_on, layer_norms_after=layer_norms_after, ra_type=ra_type, transformer_type=transformer_type) self.embeddings = EmbeddingsStack({'x': embeddings}) self.freeze = freeze_encoders self.reduction_layer_2 = 
MaxPool1D(d_out) if reduction_type_1.endswith('max') else MeanPool1D(d_out) self.apply(self.init_layer_weights) def init_layer_weights(self, module): if isinstance(module, (nn.Linear, nn.Embedding, nn.LayerNorm)): module.weight.data.normal_(mean=0.0, std=self.weight_std) if isinstance(module, (nn.Linear, nn.LayerNorm)) and module.bias is not None: module.bias.data.zero_() def encode_query_base(self, query): query_mask = (query != Offsets.PAD) att_mask = query_mask.unsqueeze(1).unsqueeze(1) with torch.no_grad() if self.freeze else contextlib.ExitStack(): embedded = self.embeddings({'x': query}) encoded_query = self.transformer((embedded, att_mask)) encoded_query = self.reduction_layer_1((encoded_query, encoded_query, encoded_query, att_mask)) return encoded_query def encode_response_base(self, response): response_lengths = torch.sum(response != Offsets.PAD, dim=1) with torch.no_grad() if self.freeze else contextlib.ExitStack(): embedded = self.embeddings({'x': response}) encoded_response = self.reduction_layer_2((embedded, response_lengths)) return encoded_response class CudaTimer: """A CUDA timer context manager that can be used to track and record events The timer is only enabled if `MEAD_PYTORCH_TIMER` is true. If its enabled, it will cause a large slowdown (similar to `CUDA_LAUNCH_BLOCKING`). 
""" def __init__(self, name, sync_before=True): """ :param name: :param sync_before: """ self.enabled = str2bool(os.getenv('MEAD_PYTORCH_TIMER', False)) if self.enabled: self._name = name self._start = torch.cuda.Event(enable_timing=True) self._end = torch.cuda.Event(enable_timing=True) if sync_before: torch.cuda.synchronize() def __enter__(self): if self.enabled: self._start.record() def __exit__(self, exc_type, exc_value, exc_traceback): if self.enabled: self._end.record() torch.cuda.synchronize() elapsed = self._start.elapsed_time(self._end) print(f"({os.getpid()}) {self._name} {elapsed}") class WeightedNLLLoss(nn.Module): """Weight individual training examples """ def __init__(self): super().__init__() self.loss = nn.NLLLoss(reduction='none') def forward(self, pred, y, weight): loss = self.loss(pred, y) weight = weight.type_as(loss) return torch.dot(loss, weight)/len(weight) class WeightedMultiHeadNLLLoss(nn.Module): """Weight individual training examples with multiple heads """ def __init__(self): super().__init__() self.loss = nn.NLLLoss(reduction='none') def forward(self, preds, targets, weights): loss = sum([self.loss(pred, targets[:, i]) for i, pred in enumerate(preds)]) weights = weights.type_as(loss) return torch.dot(loss, weights)/len(weights) class WeightedSequenceLoss(nn.Module): """Weight individual training examples """ def __init__(self, LossFn: nn.Module = nn.NLLLoss, avg: str = "token"): super().__init__() self.avg = avg self.crit = LossFn(ignore_index=Offsets.PAD, reduction="none") if avg == 'token': self._reduce = self._mean else: self._reduce = self._sum def _mean(self, loss): return loss.mean(axis=1) def _sum(self, loss): return loss.sum(axis=1) def forward(self, inputs: torch.Tensor, targets: torch.Tensor, weight: torch.Tensor) -> torch.Tensor: """Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, T, C] The scores from the model. Batch First :param targets: torch.LongTensor, [B, T] The labels. 
:param weight: sample weights [B, ] :returns: torch.FloatTensor, The loss. """ total_sz = targets.nelement() batchsz = weight.shape[0] loss = self.crit(inputs.view(total_sz, -1), targets.view(total_sz)).view(batchsz, -1) # [B, T] loss = torch.dot(self._reduce(loss), weight.type_as(loss)) / batchsz return loss def extra_repr(self): return f"reduction={self.avg}"
import copy import math import logging from typing import Dict, List, Optional, Tuple, Union import os import numpy as np import torch import torch.nn as nn import torch.nn.functional as F import torch.jit as jit import torch.autograd import contextlib import glob from eight_mile.utils import listify, Offsets, is_sequence, str2bool, get_alibi_slopes from eight_mile.utils import transition_mask as transition_mask_np MASK_FALSE = False logger = logging.getLogger("mead.layers") def sequence_mask(lengths: torch.Tensor, max_len: int = -1) -> torch.Tensor: """Generate a sequence mask of shape `BxT` based on the given lengths :param lengths: A `B` tensor containing the lengths of each example :param max_len: The maximum width (length) allowed in this mask (default to None) :return: A mask """ lens = lengths.cpu() if max_len < 0: max_len_v = torch.max(lens) else: max_len_v = max_len # 1 x T row = torch.arange(0, max_len_v).type_as(lens).view(1, -1) # B x 1 col = lens.view(-1, 1) # Broadcast to B x T, compares increasing number to max mask = row < col return mask def sequence_mask_mxlen(lengths: torch.Tensor, max_len: int) -> torch.Tensor: """Generate a sequence mask of shape `BxT` based on the given lengths, with a maximum value This function primarily exists to make ONNX tracing work better :param lengths: A `B` tensor containing the lengths of each example :param max_len: The maximum width (length) allowed in this mask (default to None) :return: A mask """ lens = lengths.cpu() max_len_v = max_len # 1 x T row = torch.arange(0, max_len_v).type_as(lens).view(1, -1) # B x 1 col = lens.view(-1, 1) # Broadcast to B x T, compares increasing number to max mask = row < col return mask @torch.jit.script def truncate_mask_over_time(mask: torch.Tensor, x: torch.Tensor) -> torch.Tensor: Tout = x.shape[1] mask = mask[:, :Tout] #mask = mask.narrow(1, 0, arcs_h.shape[1]) return mask def vec_log_sum_exp(vec: torch.Tensor, dim: int) -> torch.Tensor: """Vectorized version of log-sum-exp 
:param vec: Vector :param dim: What dimension to operate on :return: """ max_scores, idx = torch.max(vec, dim, keepdim=True) max_scores_broadcast = max_scores.expand_as(vec) return max_scores + torch.log(torch.sum(torch.exp(vec - max_scores_broadcast), dim, keepdim=True)) def unsort_batch(batch: torch.Tensor, perm_idx: torch.Tensor) -> torch.Tensor: """Undo the sort on a batch of tensors done for packing the data in the RNN. :param batch: The batch of data batch first `[B, ...]` :param perm_idx: The permutation index returned from the torch.sort. :returns: The batch in the original order. """ # Add ones to the shape of the perm_idx until it can broadcast to the batch perm_idx = perm_idx.to(batch.device) diff = len(batch.shape) - len(perm_idx.shape) extra_dims = [1] * diff perm_idx = perm_idx.view([-1] + extra_dims) return torch.scatter(torch.zeros_like(batch), 0, perm_idx.expand_as(batch), batch) def infer_lengths(tensor, dim=1): """Infer the lengths of an input based on the idea the Offsets.PAD was used as the padding token. :param tensor: The data to infer the length of, should be either [B, T] or [T, B] :param dim: The dimension which contains the sequential signal :returns: A Tensor of shape `[B]` that has the lengths for example item in the batch """ if len(tensor.shape) != 2: raise ValueError(f"infer_lengths only works with tensors wit two dims right now, got {len(tensor.shape)}") offsets = torch.arange(1, tensor.shape[dim] + 1, device=tensor.device, dtype=tensor.dtype).unsqueeze(1 - dim) non_pad_loc = (tensor != Offsets.PAD).to(tensor.dtype) return torch.argmax(non_pad_loc * offsets, dim=dim) + 1 def tensor_and_lengths(inputs) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: """Return either the unpacked inputs (2), or a `Tuple` of the input with None TODO: this function should probably be changed to always return the lengths second. To do this, we just need a sentinel value, e.g. <PAD> (0). 
The problem with doing this is that it might be possible to generate <PAD> in the middle of the tensor which would make that length invalid. :param inputs: Either a sequence of the `(tensor, length)` or just the `tensor` :return: A `Tuple` of `(tensor, length)` or `(tensor, None)` """ if isinstance(inputs, (list, tuple)): in_tensor, lengths = inputs else: in_tensor = inputs lengths = None return in_tensor, lengths class VariationalDropout(nn.Module): """Inverted dropout that applies the same mask at each time step.""" def __init__(self, pdrop: float = 0.5, batch_first: bool = False): """Variational Dropout :param pdrop: the percentage to drop """ super().__init__() self.pdrop = pdrop self.batch_first = batch_first def extra_repr(self): return "p=%.1f" % self.pdrop def forward(self, input: torch.Tensor) -> torch.Tensor: if not self.training: return input # Create a mask that covers a single time step if self.batch_first: dim0 = input.size(0) dim1 = 1 else: dim0 = 1 dim1 = input.size(1) mask = torch.zeros(dim0, dim1, input.size(2)).bernoulli_(1 - self.pdrop).to(input.device) mask = mask / self.pdrop # Broadcast the mask over the sequence return mask * input class SequenceLoss(nn.Module): """Computes the loss over a sequence""" def __init__(self, LossFn: nn.Module = nn.NLLLoss, avg: str = "token"): """A class that applies a Loss function to sequence via the folding trick. 
:param LossFn: A loss function to apply (defaults to `nn.NLLLoss`) :param avg: A divisor to apply, valid values are `token` and `batch` """ super().__init__() self.avg = avg if avg == "token": self.crit = LossFn(ignore_index=Offsets.PAD, reduction="mean") self._norm = self._no_norm else: self.crit = LossFn(ignore_index=Offsets.PAD, reduction="sum") self._norm = self._batch_norm def _batch_norm(self, loss, inputs): return loss / inputs.size()[0] def _no_norm(self, loss, inputs): return loss def forward(self, inputs: torch.Tensor, targets: torch.Tensor) -> torch.Tensor: """Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, .., C] The scores from the model. Batch First :param targets: torch.LongTensor, The labels. :returns: torch.FloatTensor, The loss. """ total_sz = targets.nelement() loss = self.crit(inputs.view(total_sz, -1), targets.view(total_sz)) return self._norm(loss, inputs) def extra_repr(self): return f"reduction={self.avg}" class LabelSmoothingLoss(nn.Module): def __init__(self, label_smoothing, ignore_index=0, reduction="none"): """Use Label smoothing from `Szegedy et. al., 2015`_ to temper model confidence. Implements add-gamma smoothing where the probability mass of the gold label distribution is smoothed across classes. This implementation is based on `OpenNMT-py`_ but has been adapted to not require the vocabulary size up front. .. _Szegedy et. al., 2015: https://arxiv.org/abs/1512.00567 .. 
_OpenNMY-py: https://github.com/OpenNMT/OpenNMT-py/blob/938a4f561b07f4d468647823fab761cfb51f21da/onmt/utils/loss.py#L194 """ if not (0.0 < label_smoothing <= 1.0): raise ValueError(f"`label_smoothing` must be between 0.0 and 1.0, got {label_smoothing}") super().__init__() self.ignore_index = ignore_index self.label_smoothing = label_smoothing self.confidence = 1.0 - label_smoothing self.reduction = reduction if reduction != "mean" else "batchmean" def forward(self, output: torch.Tensor, target: torch.Tensor) -> torch.Tensor: """ :param output: The model outputs, [B, V] :param target: The target labels, [B] """ B, V = output.size() smoothed = torch.full((B, V), self.label_smoothing / (V - 2)) smoothed[:, self.ignore_index] = 0 smoothed = torch.scatter(smoothed, 1, target.unsqueeze(1), self.confidence) smoothed = smoothed.masked_fill_((target == self.ignore_index).unsqueeze(1), 0) return F.kl_div(output, smoothed, reduction=self.reduction) def extra_repr(self): return f"label_smoothing={self.label_smoothing}" class MeanPool1D(nn.Module): """Do a mean pool while accounting for the length of a sequence """ def __init__(self, outsz, batch_first=True): """Set up pooling module :param outsz: The output dim, for dowstream access :param batch_first: Is this module batch first or time first? 
""" super().__init__() self.batch_first = batch_first self.reduction_dim = 1 if self.batch_first else 0 self.output_dim = outsz self.requires_length = True def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: """Apply mean pooling on the valid inputs :param inputs: A tuple of `(input, lengths)` :return: Pooled output """ tensor, lengths = tensor_and_lengths(inputs) # Regardless of whether the input is `[B, T, H]` or `[T, B, H]` the shape after # the sum is `[B, H]` so the lengths (of shape `[B]`) should be unsqueezed to # `[B, 1]` in order to broadcast return torch.sum(tensor, self.reduction_dim, keepdim=False) / torch.unsqueeze(lengths, -1).to(tensor.dtype).to( tensor.device ) def extra_repr(self): return f"batch_first={self.batch_first}" class MaxPool1D(nn.Module): """Do a max-pooling operation with or without a length given """ def __init__(self, outsz, batch_first=True): super().__init__() self.batch_first = batch_first self.reduction_dim = 1 if self.batch_first else 0 self.output_dim = outsz def forward(self, inputs: Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]) -> torch.Tensor: """If we are given a tuple as input, we will use the length, otherwise we will do an operation without masking :param inputs: either a tuple of `(input, lengths)` or a tensor `input` :return: A pooled tensor """ tensor, lengths = tensor_and_lengths(inputs) if lengths is not None: # If tensor = `[B, T, H]` # mask = `[B, T, 1]` # If tensor = `[T, B, H]` # mask = `[T, B, 1]` # So it will mask all the values in H past the right length mask = sequence_mask(lengths).to(tensor.device) mask = mask if self.batch_first else bth2tbh(mask) # Fill masked with very negative so it never gets selected tensor = tensor.masked_fill(mask.unsqueeze(-1) == MASK_FALSE, -1e4) dmax, _ = torch.max(tensor, self.reduction_dim, keepdim=False) return dmax def extra_repr(self) -> str: return f"batch_first={self.batch_first}" # Torch only added this module in 1.4.0, shim class 
GeLU(nn.Module): def __init__(self): super().__init__() def forward(self, x): return torch.nn.functional.gelu(x) #Code taken from: https://github.com/huggingface/transformers/blob/766d4bf7920213bdd8a8afb42a72719190124568/src/transformers/activations.py#L27 class Gpt2GELU(nn.Module): """ Implementation of the GELU activation function currently in Google BERT repo (identical to OpenAI GPT). Also see the Gaussian Error Linear Units paper: https://arxiv.org/abs/1606.08415 """ def forward(self, input): return 0.5 * input * (1.0 + torch.tanh(math.sqrt(2.0 / math.pi) * (input + 0.044715 * torch.pow(input, 3.0)))) def get_activation(name: str = "relu") -> nn.Module: """Get back an `nn.Module` by string name of the activation operator :param name: A string name of the operation :return: A module associated with that string """ if name is None or name == "ident": return nn.Identity() if name == "tanh": return nn.Tanh() if name == "gelu": return GeLU() if name == "hardtanh": return nn.Hardtanh() if name == "leaky_relu": return nn.LeakyReLU() if name == "prelu": return nn.PReLU() if name == "sigmoid": return nn.Sigmoid() if name == "log_sigmoid": return nn.LogSigmoid() if name == "log_softmax": return nn.LogSoftmax(dim=-1) if name == "softmax": return nn.Softmax(dim=-1) if name == "gpt2_gelu": return Gpt2GELU() return nn.ReLU() def _cat_dir(h: torch.Tensor) -> torch.Tensor: """Concat forward and backword state vectors. The shape of the hidden is `[#layers * #dirs, B, H]`. The docs say you can separate directions with `h.view(#l, #dirs, B, H)` with the forward dir being index 0 and backwards dir being 1. This means that before separating with the view the forward dir are the even indices in the first dim while the backwards dirs are the odd ones. 
Here we select the even and odd values and concatenate them :param h: The hidden shape as it comes back from PyTorch modules """ return torch.cat([h[0 : h.size(0) : 2], h[1 : h.size(0) : 2]], dim=-1) def concat_state_dirs(state): """Convert the bidirectional out of an RNN so the forward and backward values are a single vector.""" if isinstance(state, tuple): return tuple(_cat_dir(h) for h in state) return _cat_dir(state) class Conv1DSame(nn.Module): """Perform a 1D convolution with output size same as input size To make this operation work as expected, we cannot just use `padding=kernel_size//2` inside of the convolution operation. Instead, we zeropad the input using the `ConstantPad1d` module """ def __init__(self, in_channels: int, out_channels: int, kernel_size: int, bias: bool = True, groups: int = 1, unif: float = 0.0, initializer: Optional[str] = None, activation: Optional[str] = None): """Create a 1D conv to produce the same output size as input :param in_channels: The number of input feature maps :param out_channels: The number of output feature maps :param kernel_size: The kernel size :param bias: Is bias on? 
:param groups: Number of conv groups """ super().__init__() end_pad = kernel_size // 2 start_pad = end_pad - 1 if kernel_size % 2 == 0 else end_pad self.conv = nn.Sequential( nn.ConstantPad1d((start_pad, end_pad), 0.), pytorch_conv1d(in_channels, out_channels, kernel_size, unif=unif, initializer=initializer, bias=bias, groups=groups), get_activation(activation) ) def forward(self, x: torch.Tensor) -> torch.Tensor: """Do convolution1d on an input tensor, `[B, C, T]` :param x: The input tensor of shape `[B, C, T]` :return: The output tensor of shape `[B, H, T]` """ return self.conv(x) class ConvEncoder(nn.Module): """1D Convolutional layer encoder with given activation function, optional dropout This module takes in a temporal signal of either shape `[B, C, T]` or `[B, T, C]`, depending on the constructor and produces an output signal of the same orientation (`[B, H, T]` or `[B, T, H]`, respectively). We default to `[B, T, H]` orientation to make it more convenient for typical layout, but this requires transposing the last 2 dims before and after the convolution operation. """ def __init__(self, insz: int, outsz: int, filtsz: int, pdrop: float = 0.0, activation: str = "relu", bias: bool = True, groups: int = 1, hidden_last=True): """Construct the encoder with optional dropout, given activation, and orientation :param insz: The number of input feature maps :param outsz: The number of output feature maps (or hidden size) :param filtsz: The kernel size :param pdrop: The amount of dropout to apply, this defaults to 0 :param activation: The activation function by name, defaults to `relu` :param bias: Use bias? :param groups: How many conv groups. Defaults to 1 :param hidden_last: PyTorch only! If `True` the orientatiation is `[B, T, H]`, o.w. 
`[B, H, T]` expected """ super().__init__() self.output_dim = outsz conv = Conv1DSame(insz, outsz, filtsz, bias=bias, groups=groups) act = get_activation(activation) dropout = nn.Dropout(pdrop) if hidden_last: self.conv = nn.Sequential(BTH2BHT(), conv, act, dropout, BHT2BTH()) else: self.conv = nn.Sequential(conv, act, dropout) def forward(self, input: torch.Tensor) -> torch.Tensor: return self.conv(input) class ConvEncoderStack(nn.Module): """Create a stack of convolutional encoders with residual connections between, using the `ConvEncoder` underneath This creates an encoder stack of convolutions, finally returning the last temporal output. Each layer uses zero-padding which causes the output of the convolution at each layer to be the same length. As in the `ConvEncoder` we support input tensor shapes of `[B, C, T]` or `[B, T, C]` depending on the constructor initialization, and transpose underneath the input and output of the stack if the orientation is defaulted to `[B, T, C]` """ def __init__(self, insz: int, outsz: int, filtsz: int, nlayers: int = 1, pdrop: float = 0.0, activation: str = "relu", bias: bool = True, groups: int = 1, hidden_last=True): """Construct the encoder stack :param insz: The input number of feature maps :param outsz: The output number of feature maps :param filtsz: The kernel size :param nlayers: The number of layers in the stack (defaults to a single layer) :param pdrop: The amount of dropout to apply (defaults to `0`) :param activation: The activation function to use as a string, defaults to `relu` :param bias: Use bias? :param groups: How many conv groups. Defaults to 1 :param hidden_last: PyTorch only! If `True` the orientatiation is `[B, T, H]`, o.w. 
`[B, H, T]` expected """ super().__init__() if hidden_last: first_layer = nn.Sequential(BTH2BHT(), ConvEncoder(insz, outsz, filtsz, pdrop, activation, bias, groups, hidden_last=False)) else: first_layer = ConvEncoder(insz, outsz, filtsz, pdrop, activation, bias, groups, hidden_last=False) subsequent_layer = ResidualBlock(ConvEncoder(outsz, outsz, filtsz, pdrop, activation, bias, groups, hidden_last=False)) self.layers = nn.ModuleList([first_layer] + [copy.deepcopy(subsequent_layer) for _ in range(nlayers - 1)]) if hidden_last: self.layers.append(BHT2BTH()) self.output_dim = outsz def forward(self, input: torch.Tensor) -> torch.Tensor: """Apply a stack of 1D convolutions with residual connections between them :param input: A tensor of shape `[B, T, C]` or `[B, C, T]` depending on value of `hidden_last` :return: A tensor of shape `[B, T, H]` or `[B, H, T]` depending on the value of `hidden_last` """ x = input for layer in self.layers: x = layer(x) return x def bth2bht(t: torch.Tensor) -> torch.Tensor: """Transpose the 2nd and 3rd dim of a tensor""" return t.transpose(1, 2).contiguous() class BTH2BHT(nn.Module): """Utility layer to convert from `[B, T, H]` to `[B, H, T]` """ def __init__(self): super().__init__() def forward(self, t: torch.Tensor) -> torch.Tensor: return bth2bht(t) def tbh2bht(t: torch.Tensor) -> torch.Tensor: """Permute the dimensions, first goes to third, second goes to first, last moves to second""" return t.permute(1, 2, 0).contiguous() class TBH2BHT(nn.Module): """Utility layer to convert from `[T, B, H]` to `[B, H, T]` """ def __init__(self): super().__init__() def forward(self, t: torch.Tensor) -> torch.Tensor: return tbh2bht(t) def tbh2bth(t: torch.Tensor) -> torch.Tensor: """Transpose the first 2 dims""" return t.transpose(0, 1).contiguous() class TBH2BTH(nn.Module): """Utility layer to convert from `[T, B, H]` to `[B, T, H]` """ def __init__(self): super().__init__() def forward(self, t: torch.Tensor) -> torch.Tensor: return tbh2bth(t) def 
bth2tbh(t: torch.Tensor) -> torch.Tensor: """Transpose the first 2 dims""" return t.transpose(0, 1).contiguous() class BTH2TBH(nn.Module): """Utility layer to convert from `[B, T, H]` to `[T, B, H]` """ def __init__(self): super().__init__() def forward(self, t: torch.Tensor) -> torch.Tensor: return bth2tbh(t) def bht2bth(t: torch.Tensor) -> torch.Tensor: return t.transpose(1, 2).contiguous() class BHT2BTH(nn.Module): """Utility layer to convert from `[B, H, T]` to `[B, T, H]` """ def __init__(self): super().__init__() def forward(self, t: torch.Tensor) -> torch.Tensor: return bht2bth(t) class ParallelConv(nn.Module): """Layer of parallel convolutions with varying filter sizes followed by max over time pooling This module takes an input tensor of any orientation based on its constructor, and pools its output to shape `[B, H]`, where `H` is `outsz * len(filtsz)` """ def __init__(self, insz: int, outsz: int, filtsz: List[int], activation: str = "relu", input_fmt: str = "bth"): """ Constructor for a parallel convolution from any orientation tensor input :param insz: The number of input feature maps :param outsz: The number of output feature maps :param filtsz: The kernel size as a list of parallel filters to apply, e.g. `[3, 4, 5]` :param activation: An activation function by name to apply :param input_fmt: A string for the orientation. 
Valid values are `bth` or `btc` meaning hidden units last, `bht` or `bct` meaning the temporal dim last or `tbh` or `tbc` meaning the hidden units last and the temporal dim first """ super().__init__() self.requires_length = False convs = [] outsz_filts = outsz self.input_fmt = input_fmt.lower() if type(outsz) == int: outsz_filts = len(filtsz) * [outsz] self.output_dim = sum(outsz_filts) for i, fsz in enumerate(filtsz): if fsz % 2 == 0: conv = Conv1DSame(insz, outsz_filts[i], fsz) else: pad = fsz // 2 conv = nn.Conv1d(insz, outsz_filts[i], fsz, padding=pad) conv = nn.Sequential( conv, get_activation(activation) ) convs.append(conv) # Add the module so its managed correctly self.convs = nn.ModuleList(convs) def transform_input(self, t: torch.Tensor) -> torch.Tensor: if self.input_fmt == "bth" or self.input_fmt == "btc": return bth2bht(t) elif self.input_fmt == "tbh" or self.input_fmt == "tbc": return tbh2bht(t) else: return t def forward(self, inputs: torch.Tensor) -> torch.Tensor: """Transform the input to `[B, C, T]` from any orientation and perform parallel 1D convs and max over time pool :param inputs: An input tensor of any format specified in the constructor :return: A `[B, H]` tensor representing the pooled outputs """ mots = [] input_bct = self.transform_input(inputs) for conv in self.convs: # In Conv1d, data BxCxT, max over time conv_out = conv(input_bct) mot, _ = conv_out.max(2) mots.append(mot) mots = torch.cat(mots, 1) return mots # self.conv_drop(mots) class Highway(nn.Module): """Highway layer as defined in https://arxiv.org/abs/1505.00387 """ def __init__(self, input_size: int, **kwargs): """Highway layer constructor :param input_size: The input hidden size :param kwargs: """ super().__init__() self.proj = nn.Linear(input_size, input_size) self.transform = nn.Linear(input_size, input_size) self.transform.bias.data.fill_(-2.0) self.output_dim = input_size def forward(self, input: torch.Tensor) -> torch.Tensor: """Take a tensor in and produce the 
highway layer output :param input: Input tensor :return: output tensor """ proj_result = torch.relu(self.proj(input)) proj_gate = torch.sigmoid(self.transform(input)) gated = (proj_gate * proj_result) + ((1 - proj_gate) * input) return gated def pytorch_linear(in_sz: int, out_sz: int, unif: float = 0, initializer: str = None, bias: bool = True): """Utility function that wraps a linear (AKA dense) layer creation, with options for weight init and bias""" l = nn.Linear(in_sz, out_sz, bias=bias) if unif > 0: l.weight.data.uniform_(-unif, unif) elif initializer == "ortho": nn.init.orthogonal(l.weight) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(l.weight) else: nn.init.xavier_uniform_(l.weight) if bias: l.bias.data.zero_() return l class StackedLSTMCell(nn.Module): """A stacked LSTM cells applied at a timestep """ def __init__(self, num_layers: int, input_size: int, rnn_size: int, dropout: float): super().__init__() self.dropout = nn.Dropout(dropout) self.num_layers = num_layers self.layers = nn.ModuleList() for i in range(num_layers): self.layers.append(nn.LSTMCell(input_size=input_size, hidden_size=rnn_size, bias=False)) input_size = rnn_size def forward(self, input: torch.Tensor, hidden: torch.Tensor): """Apply a stack of LSTMs :param input: The input to the first LSTM `[B, H]` :param hidden: The previous `(h, c)` where `h=(h_0, h_1,..)`, `c=(c_0, c_1,..)` :return: The output and hidden `(h, c)` where `h=(h_0, h_1,..)`, `c=(c_0, c_1,..)` """ h_0, c_0 = hidden hs, cs = [], [] for i, layer in enumerate(self.layers): h_i, c_i = layer(input, (h_0[i], c_0[i])) input = h_i if i != self.num_layers - 1: input = self.dropout(input) hs.append(h_i) cs.append(c_i) hs = torch.stack(hs) cs = torch.stack(cs) return input, (hs, cs) class StackedGRUCell(nn.Module): """A stacked GRU cells applied at a timestep """ def __init__(self, num_layers: int, input_size: int, rnn_size: int, dropout: float): super().__init__() self.dropout = nn.Dropout(dropout) 
self.num_layers = num_layers self.layers = nn.ModuleList() for i in range(num_layers): self.layers.append(nn.GRUCell(input_size=input_size, hidden_size=rnn_size)) input_size = rnn_size def forward(self, input: torch.Tensor, hidden: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: """Apply a stack of GRUs :param input: The input to the first LSTM `[B, H]` :param hidden: The previous `h` where `h=(h_0, h_1,..)` :return: The output and hidden `h` where `h=(h_0, h_1,..)` """ h_0 = hidden hs = [] for i, layer in enumerate(self.layers): h_i = layer(input, (h_0[i])) input = h_i if i != self.num_layers: input = self.dropout(input) hs.append(h_i) hs = torch.stack(hs) return input, hs class Dense(nn.Module): """Dense (Linear) layer with optional activation given This module is the equivalent of the tf.keras.layer.Dense, module with optional activations applied """ def __init__( self, insz: int, outsz: int, activation: Optional[str] = None, unif: float = 0, initializer: Optional[str] = None, ): """Constructor for "dense" or "linear" layer, with optional activation applied :param insz: The number of hidden units in the input :param outsz: The number of hidden units in the output :param activation: The activation function by name, defaults to `None`, meaning no activation is applied :param unif: An optional initialization value which can set the linear weights. 
If given, biases will init to 0 :param initializer: An initialization scheme by string name: `ortho`, `kaiming` or `he`, `xavier` or `glorot` """ super().__init__() self.layer = pytorch_linear(insz, outsz, unif, initializer) self.activation = get_activation(activation) self.output_dim = outsz def forward(self, input: torch.Tensor) -> torch.Tensor: """Run a linear projection over the input, followed by an optional activation given by constructor :param input: the input tensor :return: the transformed output """ return self.activation(self.layer(input)) class WeightTieDense(nn.Module): """Do weight tying from the input parameter This module never copies the weight pointer, it lazily accesses to allow the tied variable to reset its parameters after initialization. This is helpful for cases where we have LMs and are reloading them after they have been initially created """ def __init__(self, tie: nn.Module, bias=False): super().__init__() self.tie = tie self.transform = self._get_transform(tie) if bias: bias = torch.nn.Parameter(torch.zeros(self.transform(self.weight.shape[0]))) else: bias = None self.register_parameter("bias", bias) def _get_transform(self, tie: nn.Module): emb = getattr(tie, "embeddings", None) if emb is not None: return self._identity return self._transpose @property def weight(self): emb = getattr(self.tie, "embeddings", None) if emb is not None: return getattr(emb, "weight") return getattr(self.tie, "weight") def _identity(self, x: torch.Tensor) -> torch.Tensor: return x def _transpose(self, x: torch.Tensor) -> torch.Tensor: return x.transpose(0, 1).contiguous() def forward(self, input: torch.Tensor) -> torch.Tensor: return F.linear(input, self.transform(self.weight), self.bias) class ResidualBlock(nn.Module): """Create a residual block by wrapping an layer with a residual connection""" def __init__(self, layer: Optional[nn.Module] = None, **kwargs): """Wrap an layer with a residual connection :param layer: This layer will be applied to the input 
and added to the input :param kwargs: """ super().__init__() self.layer = layer if self.layer is not None and hasattr(layer, "output_dim"): self.output_dim = layer.output_dim def forward(self, input: torch.Tensor) -> torch.Tensor: """Apply a residual block :param input: A tensor to use as input and to add to output :return: The residual connection output """ return input + self.layer(input) class SkipConnection(ResidualBlock): """Subclass of ResidualBlock(Dense) with an activation function given """ def __init__(self, input_size: int, activation: str = "relu"): """Create a `SkipConnection` :param input_size: The input dimension size :param activation: A string activation name """ super().__init__(None) self.layer = Dense(input_size, input_size, activation=activation) self.output_dim = input_size def rnn_cell(insz: int, hsz: int, rnntype: str, nlayers: int, dropout: float): """This is a wrapper function around a stacked RNN cell :param insz: The input dimensions :param hsz: The hidden dimensions :param rnntype: An RNN type `gru` or `lstm` :param nlayers: The number of layers to stack :param dropout: The amount of dropout :return: """ if rnntype == "gru": rnn = StackedGRUCell(nlayers, insz, hsz, dropout) else: rnn = StackedLSTMCell(nlayers, insz, hsz, dropout) return rnn def pytorch_lstm( insz: int, hsz: int, rnntype: str, nlayers: int, dropout: float, unif: float = 0, batch_first: bool = False, initializer: str = None, ) -> torch.nn.LSTM: """Wrapper around `torch.nn.LSTM`, mainly for weight initialization options :param insz: The input dimension :param hsz: The number of hidden units :param rnntype: A string description of the type of LSTM: `bi?lstm` or `lstm` :param nlayers: The number of layers :param dropout: How much dropout to apply :param unif: if uniform initialization, what range? 
:param batch_first: Should we do the RNN batch first or time first :param initializer: An optional string representing a style of initialization `ortho`, `he`/`kaiming`, `xavier`/`glorot` :return: An LSTM """ if nlayers == 1: dropout = 0.0 ndir = 2 if rnntype.startswith("b") else 1 layer_hsz = hsz // ndir rnn = torch.nn.LSTM( insz, layer_hsz, nlayers, dropout=dropout, bidirectional=True if ndir > 1 else False, batch_first=batch_first ) # , bias=False) if initializer == "ortho": nn.init.orthogonal(rnn.weight_hh_l0) nn.init.orthogonal(rnn.weight_ih_l0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(rnn.weight_hh_l0) nn.init.kaiming_uniform(rnn.weight_ih_l0) elif unif > 0: for weight in rnn.parameters(): weight.data.uniform_(-unif, unif) else: nn.init.xavier_uniform_(rnn.weight_hh_l0) nn.init.xavier_uniform_(rnn.weight_ih_l0) return rnn class LSTMEncoderBase(nn.Module): """The LSTM encoder is a base for a set of encoders producing various outputs. All LSTM encoders inheriting this class will trim the input to the max length given in the batch. For example, if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will be length `S` (or more precisely, `[B, S, H]`) *PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and this is the PyTorch default. There is an extra parameter in all of these models called `batch_first` which controls this. Currently, the default is time first (`batch_first=False`), which differs from TensorFlow. To match the TF impl, set `batch_first=True`. *PyTorch Note*: Most `LSTMEncoder` variants just define the `forward`. This module cannot provide the same utility as the TensorFlow `LSTMEncoder` base right now, because because the JIT isnt handling subclassing of forward properly. 
""" def __init__( self, insz: int, hsz: int, nlayers: int, pdrop: float = 0.0, requires_length: bool = True, batch_first: bool = False, unif: float = 0, initializer: str = None, **kwargs, ): """Produce a stack of LSTMs with dropout performed on all but the last layer. :param insz: The size of the input :param hsz: The number of hidden units per LSTM :param nlayers: The number of layers of LSTMs to stack :param pdrop: The probability of dropping a unit value during dropout, defaults to 0 :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`) :param batch_first: PyTorch only! Should we do batch first input or time-first input? Defaults to `False` (differs from TF!) :param unif: PyTorch only! Initialization parameters for RNN :param initializer: PyTorch only! A string describing optional initialization type for RNN """ super().__init__() self.requires_length = requires_length self.batch_first = batch_first self.nlayers = nlayers if nlayers == 1: pdrop = 0.0 self.rnn = torch.nn.LSTM(insz, hsz, nlayers, dropout=pdrop, bidirectional=False, batch_first=batch_first) if initializer == "ortho": nn.init.orthogonal(self.rnn.weight_hh_l0) nn.init.orthogonal(self.rnn.weight_ih_l0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(self.rnn.weight_hh_l0) nn.init.kaiming_uniform(self.rnn.weight_ih_l0) elif unif > 0: for weight in self.rnn.parameters(): weight.data.uniform_(-unif, unif) else: nn.init.xavier_uniform_(self.rnn.weight_hh_l0) nn.init.xavier_uniform_(self.rnn.weight_ih_l0) self.output_dim = hsz # def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]: # tbc, lengths = tensor_and_lengths(inputs) # packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths, batch_first=self.batch_first) # output, hidden = self.rnn(packed) # output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first) # return self.output_fn(output, hidden) # 
def output_fn(self, output, state): # return output, self.extract_top_state(state) def extract_top_state(self, state: Tuple[torch.Tensor, torch.Tensor]) -> List[torch.Tensor]: """Get a view of the top state of shape [B, H]` :param state: :return: """ # Select the topmost state with -1 and the only direction is forward (select with 0) top = [] for s in state: top.append(s.view(self.nlayers, 1, -1, self.output_dim)[-1, 0]) return top class LSTMEncoderSequence(LSTMEncoderBase): """LSTM encoder to produce the transduced output sequence. Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of shape `[B, S, H]` where `S = max(lengths)`. The lengths of the output sequence may differ from the input sequence if the `max(lengths)` given is shorter than `T` during execution. *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`, and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same orientation. 
""" def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: """Take in a tuple of `(sequence, lengths)` and produce and output tensor of the last layer of LSTMs The value `S` here is defined as `max(lengths)`, `S <= T` :param inputs: sequence of shapes `[B, T, C]` or `[T, B, C]` and a lengths of shape `[B]` :return: A tensor of shape `[B, S, H]` or `[S, B, H]` depending on setting of `batch_first` """ tbc, lengths = inputs packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first) output, hidden = self.rnn(packed) output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first) return output class LSTMEncoderWithState(nn.Module): """LSTM encoder producing the hidden state and the output, where the input doesnt require any padding PyTorch note: This type of encoder doesnt inherit the `LSTMEncoderWithState` base """ def __init__( self, insz: int, hsz: int, nlayers: int, pdrop: float = 0.0, batch_first: bool = False, unif: float = 0, initializer: str = None, **kwargs, ): """ :param insz: The size of the input :param hsz: The number of hidden units per LSTM :param nlayers: The number of layers of LSTMs to stack :param pdrop: The probability of dropping a unit value during dropout, defaults to 0 :param batch_first: PyTorch only! do batch first or time-first input? Defaults to `False` (differs from TF!) :param unif: PyTorch only! Initialization parameters for RNN :param initializer: PyTorch only! 
A string describing optional initialization type for RNN """ super().__init__() self.requires_length = False self.requires_state = True self.batch_first = batch_first self.nlayers = nlayers if nlayers == 1: pdrop = 0.0 self.rnn = torch.nn.LSTM(insz, hsz, nlayers, dropout=pdrop, bidirectional=False, batch_first=batch_first) if initializer == "ortho": nn.init.orthogonal(self.rnn.weight_hh_l0) nn.init.orthogonal(self.rnn.weight_ih_l0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(self.rnn.weight_hh_l0) nn.init.kaiming_uniform(self.rnn.weight_ih_l0) elif unif > 0: for weight in self.rnn.parameters(): weight.data.uniform_(-unif, unif) else: nn.init.xavier_uniform_(self.rnn.weight_hh_l0) nn.init.xavier_uniform_(self.rnn.weight_ih_l0) self.output_dim = hsz def forward(self, input_and_prev_h: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]: """ :param input_and_prev_h: The input at this timestep and the previous hidden unit or `None` :return: Raw `torch.nn.LSTM` output """ inputs, hidden = input_and_prev_h output, hidden = self.rnn(inputs, hidden) return output, hidden ##concat_state_dirs(hidden) class LSTMEncoderAll(LSTMEncoderBase): """LSTM encoder that passes along the full output and hidden states for each layer Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence, and a tuple of hidden vector `[L, B, H]` and context vector `[L, B, H]`, respectively *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. 
Also note that in PyTorch, this defaults to `True` """ def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]: """ :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]` :return: An output tensor `[B, S, H]` or `[B, H, S]` , and tuple of hidden `[L, B, H]` and context `[L, B, H]` """ tbc, lengths = inputs packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first) output, hidden = self.rnn(packed) output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first) return output, hidden class LSTMEncoderHidden(LSTMEncoderBase): """LSTM encoder that returns the top hidden state Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and returns a hidden unit tensor of shape `[B, H]` *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. Also note that in PyTorch, this defaults to `True` """ def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: """ :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]` :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state """ tbc, lengths = inputs packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first) output, hidden = self.rnn(packed) output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first) return self.extract_top_state(hidden)[0] # TODO: this module only exists in pytorch. Do we eliminate it or put it in both? 
class LSTMEncoderSequenceHiddenContext(LSTMEncoderBase): def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]: tbc, lengths = inputs packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first) output, hidden = self.rnn(packed) output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first) return output, self.extract_top_state(hidden) class BiLSTMEncoderBase(nn.Module): """BiLSTM encoder base for a set of encoders producing various outputs. All BiLSTM encoders inheriting this class will trim the input to the max length given in the batch. For example, if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will be length `S` (or more precisely, `[B, S, H]`). Because its bidirectional, half of the hidden units given in the constructor will be applied to the forward direction and half to the backward direction, and these will get concatenated. *PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and this is the PyTorch default. There is an extra parameter in all of these models called `batch_first` which controls this. Currently, the default is time first (`batch_first=False`), which differs from TensorFlow. To match the TF impl, set `batch_first=True`. *PyTorch Note*: Most `BiLSTMEncoder` variants just define the `forward`. This module cannot provide the same utility as the TensorFlow `BiLSTMEncoder` base right now, because because the JIT isnt handling subclassing of forward properly. """ def __init__( self, insz: int, hsz: int, nlayers: int, pdrop: float = 0.0, requires_length: bool = True, batch_first: bool = False, unif: float = 0, initializer: str = None, **kwargs, ): """Produce a stack of LSTMs with dropout performed on all but the last layer. 
:param insz: The size of the input :param hsz: The number of hidden units per BiLSTM (`hsz//2` used for each direction and concatenated) :param nlayers: The number of layers of BiLSTMs to stack :param pdrop: The probability of dropping a unit value during dropout, defaults to 0 :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`) :param batch_first: Should we do batch first input or time-first input? Defaults to `False` (differs from TF!) :param unif: PyTorch only! Initialization parameters for RNN :param initializer: PyTorch only! A string describing optional initialization type for RNN """ super().__init__() self.requires_length = requires_length self.batch_first = batch_first self.nlayers = nlayers if nlayers == 1: pdrop = 0.0 self.rnn = torch.nn.LSTM(insz, hsz // 2, nlayers, dropout=pdrop, bidirectional=True, batch_first=batch_first) if initializer == "ortho": nn.init.orthogonal(self.rnn.weight_hh_l0) nn.init.orthogonal(self.rnn.weight_ih_l0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(self.rnn.weight_hh_l0) nn.init.kaiming_uniform(self.rnn.weight_ih_l0) elif unif > 0: for weight in self.rnn.parameters(): weight.data.uniform_(-unif, unif) else: nn.init.xavier_uniform_(self.rnn.weight_hh_l0) nn.init.xavier_uniform_(self.rnn.weight_ih_l0) self.output_dim = hsz def extract_top_state(self, state): # Select the topmost state with -1 and the only direction is forward (select with 0) return tuple(s.view(self.nlayers, 1, -1, self.output_dim)[-1, 0] for s in state) # TODO: this module only exists in pytorch. Do we eliminate it or put it in both? 
class BiLSTMEncoderSequenceHiddenContext(BiLSTMEncoderBase):

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
        """Return the padded output sequence along with the extracted top state (directions concatenated)."""
        tbc, lengths = inputs
        # Pack to skip pad steps; lengths must live on CPU for packing
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        # concat_state_dirs merges the fwd/bwd directions before the top state is selected
        return output, self.extract_top_state(concat_state_dirs(hidden))


class BiLSTMEncoderAll(BiLSTMEncoderBase):
    """BiLSTM encoder that passes along the full output and hidden states for each layer

    Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]`

    This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence,
    and a tuple of hidden vector `[L, B, H]` and context vector `[L, B, H]`, respectively

    *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification
    of `batch_first`. Also note that in PyTorch, this defaults to `True`

    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor `[B, S, H]` or `[B, H, S]`, and tuple of hidden `[L, B, H]` and context `[L, B, H]`
        """
        tensor, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tensor, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output, concat_state_dirs(hidden)


class BiLSTMEncoderSequence(BiLSTMEncoderBase):
    """BiLSTM encoder to produce the transduced output sequence.

    Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of
    shape `[B, S, H]` where `S = max(lengths)`.  The lengths of the output sequence may differ from the input
    sequence if the `max(lengths)` given is shorter than `T` during execution.

    *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`,
    and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same
    orientation.
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """Take in a tuple of `(sequence, lengths)` and produce and output tensor of the last layer of LSTMs

        The value `S` here is defined as `max(lengths)`, `S <= T`

        :param inputs: sequence of shapes `[B, T, C]` or `[T, B, C]` and a lengths of shape `[B]`
        :return: A tensor of shape `[B, S, H]` or `[S, B, H]` depending on setting of `batch_first`
        """
        tensor, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tensor, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output


class BiLSTMEncoderHidden(BiLSTMEncoderBase):
    """BiLSTM encoder that returns the top hidden state

    Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and
    returns a hidden unit tensor of shape `[B, H]`

    *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification
    of `batch_first`.
    Also note that in PyTorch, this defaults to `True`

    """

    def forward(self, inputs):
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state
        """
        tensor, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tensor, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        # Merge directions first, then [0] keeps the hidden state `h` (dropping the cell state `c`)
        return self.extract_top_state(concat_state_dirs(hidden))[0]


# TODO: Add this to TF or remove
class BiLSTMEncoderHiddenContext(BiLSTMEncoderBase):

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """Return the extracted top state with the two directions concatenated."""
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return self.extract_top_state(concat_state_dirs(hidden))


class GRUEncoderBase(nn.Module):
    """The GRU encoder is a base for a set of encoders producing various outputs.

    All GRU encoders inheriting this class will trim the input to the max length given in the batch.  For example,
    if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will
    be length `S` (or more precisely, `[B, S, H]`)

    *PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and
    this is the PyTorch default.  There is an extra parameter in all of these models called `batch_first` which
    controls this.  Currently, the default is time first (`batch_first=False`), which differs from TensorFlow.
    To match the TF impl, set `batch_first=True`.

    *PyTorch Note*: Most `GRUEncoder` variants just define the `forward`.
    This module cannot provide the same utility as the
    TensorFlow `GRUEncoder` base right now, because because the JIT isnt handling subclassing of forward properly.

    """

    def __init__(
        self,
        insz: int,
        hsz: int,
        nlayers: int,
        pdrop: float = 0.0,
        requires_length: bool = True,
        batch_first: bool = False,
        unif: float = 0,
        initializer: str = None,
        **kwargs,
    ):
        """Produce a stack of GRUs with dropout performed on all but the last layer.

        :param insz: The size of the input
        :param hsz: The number of hidden units per GRU
        :param nlayers: The number of layers of GRUs to stack
        :param pdrop: The probability of dropping a unit value during dropout, defaults to 0
        :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`)
        :param batch_first: PyTorch only! Should we do batch first input or time-first input? Defaults to `False` (differs from TF!)
        :param unif: PyTorch only! Initialization parameters for RNN
        :param initializer: PyTorch only! A string describing optional initialization type for RNN
        """
        super().__init__()
        self.requires_length = requires_length
        self.batch_first = batch_first
        self.nlayers = nlayers
        # Inter-layer dropout is a no-op with a single layer; zero it to avoid the PyTorch warning
        if nlayers == 1:
            pdrop = 0.0
        self.rnn = torch.nn.GRU(insz, hsz, nlayers, dropout=pdrop, bidirectional=False, batch_first=batch_first)
        if initializer == "ortho":
            nn.init.orthogonal_(self.rnn.weight_ih_l0)
            nn.init.orthogonal_(self.rnn.weight_hh_l0)
        elif initializer == "he" or initializer == "kaiming":
            nn.init.kaiming_uniform_(self.rnn.weight_ih_l0)
            nn.init.kaiming_uniform_(self.rnn.weight_hh_l0)
        elif unif > 0:
            for weight in self.rnn.parameters():
                weight.data.uniform_(-unif, unif)
        else:
            nn.init.xavier_uniform_(self.rnn.weight_ih_l0)
            nn.init.xavier_uniform_(self.rnn.weight_hh_l0)
        self.output_dim = hsz

    def extract_top_state(self, state: torch.Tensor) -> torch.Tensor:
        """Return the state of the last (topmost) layer."""
        return state[-1]


class GRUEncoderSequence(GRUEncoderBase):
    """GRU encoder to produce the transduced output sequence.
    Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of
    shape `[B, S, H]` where `S = max(lengths)`.  The lengths of the output sequence may differ from the input
    sequence if the `max(lengths)` given is shorter than `T` during execution.

    *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`,
    and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same
    orientation.
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """Take in a tuple of the sequence tensor `[T, B, H]` or `[B, T, H]` and its length, produce output sequence

        :param inputs: A tuple of the sequence tensor and its length
        :return: A sequence tensor of shape `[T, B, H]` or `[B, T, H]`
        """
        tbc, lengths = inputs
        # Pack (lengths on CPU), run the stack, then re-pad to max(lengths)
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output


class GRUEncoderAll(GRUEncoderBase):
    """GRU encoder that passes along the full output and hidden states for each layer

    Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]`

    This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence,
    and a hidden vector `[L, B, H]`

    *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification
    of `batch_first`.
    Also note that in PyTorch, this defaults to `True`

    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor `[B, S, H]` or `[B, H, S]` , and a hidden tensor `[L, B, H]`
        """
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        return output, hidden


class GRUEncoderHidden(GRUEncoderBase):
    """GRU encoder that returns the top hidden state

    Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and
    returns a hidden unit tensor of shape `[B, H]`

    *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification
    of `batch_first`. Also note that in PyTorch, this defaults to `True`

    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state
        """
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        # Unidirectional GRU: the top layer's final state is the pooled representation
        return self.extract_top_state(hidden)


class BiGRUEncoderBase(nn.Module):
    """BiGRU encoder base for a set of encoders producing various outputs.

    All BiGRU encoders inheriting this class will trim the input to the max length given in the batch.  For example,
    if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will
    be length `S` (or more precisely, `[B, S, H]`).
    Because its bidirectional, half of the hidden units given in the
    constructor will be applied to the forward direction and half to the backward direction, and these will get
    concatenated.

    *PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and
    this is the PyTorch default.  There is an extra parameter in all of these models called `batch_first` which
    controls this.  Currently, the default is time first (`batch_first=False`), which differs from TensorFlow.
    To match the TF impl, set `batch_first=True`.

    *PyTorch Note*: Most `BiGRUEncoder` variants just define the `forward`.  This module cannot provide the same
    utility as the TensorFlow `BiGRUEncoder` base right now, because because the JIT isnt handling subclassing of
    forward properly.

    """

    def __init__(
        self,
        insz: int,
        hsz: int,
        nlayers: int,
        pdrop: float = 0.0,
        requires_length: bool = True,
        batch_first: bool = False,
        unif: float = 0,
        initializer: str = None,
        **kwargs,
    ):
        """Produce a stack of GRUs with dropout performed on all but the last layer.

        :param insz: The size of the input
        :param hsz: The number of hidden units per BiGRU (`hsz//2` used for each direction and concatenated)
        :param nlayers: The number of layers of BiGRUs to stack
        :param pdrop: The probability of dropping a unit value during dropout, defaults to 0
        :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`)
        :param batch_first: Should we do batch first input or time-first input? Defaults to `False` (differs from TF!)
        :param unif: PyTorch only! Initialization parameters for RNN
        :param initializer: PyTorch only!
A string describing optional initialization type for RNN """ super().__init__() self.requires_length = requires_length self.batch_first = batch_first self.nlayers = nlayers if nlayers == 1: pdrop = 0.0 self.rnn = torch.nn.GRU(insz, hsz // 2, nlayers, dropout=pdrop, bidirectional=True, batch_first=batch_first) if initializer == "ortho": nn.init.orthogonal(self.rnn.weight_hh_l0) nn.init.orthogonal(self.rnn.weight_ih_l0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform(self.rnn.weight_hh_l0) nn.init.kaiming_uniform(self.rnn.weight_ih_l0) elif unif > 0: for weight in self.rnn.parameters(): weight.data.uniform_(-unif, unif) else: nn.init.xavier_uniform_(self.rnn.weight_hh_l0) nn.init.xavier_uniform_(self.rnn.weight_ih_l0) self.output_dim = hsz def extract_top_state(self, state: torch.Tensor) -> torch.Tensor: # Select the topmost state with -1 and the only direction is forward (select with 0) return state[-1] # TODO: normalize across backends or remove class BiGRUEncoderSequenceHiddenContext(BiGRUEncoderBase): def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]: tbc, lengths = inputs packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first) output, hidden = self.rnn(packed) output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first) return output, self.extract_top_state(_cat_dir(hidden)) class BiGRUEncoderAll(BiGRUEncoderBase): """BiGRU encoder that passes along the full output and hidden states for each layer Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence, and a hidden vector `[L, B, H]` *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. 
    Also note that in PyTorch, this defaults to `True`

    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor `[B, S, H]` or `[B, H, S]` , and a hidden vector `[L, B, H]`
        """
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        # _cat_dir concatenates the fwd/bwd states so the hidden comes back as [L, B, H]
        return output, _cat_dir(hidden)


class BiGRUEncoderSequence(BiGRUEncoderBase):
    """BiGRU encoder to produce the transduced output sequence.

    Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of
    shape `[B, S, H]` where `S = max(lengths)`.  The lengths of the output sequence may differ from the input
    sequence if the `max(lengths)` given is shorter than `T` during execution.

    *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`,
    and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same
    orientation.
""" def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: """Take in a tuple of `(sequence, lengths)` and produce and output tensor of the last layer of GRUs The value `S` here is defined as `max(lengths)`, `S <= T` :param inputs: sequence of shapes `[B, T, C]` or `[T, B, C]` and a lengths of shape `[B]` :return: A tensor of shape `[B, S, H]` or `[S, B, H]` depending on setting of `batch_first` """ tbc, lengths = inputs packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first) output, hidden = self.rnn(packed) output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first) return output class BiGRUEncoderHidden(BiGRUEncoderBase): """GRU encoder that returns the top hidden state Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and returns a hidden unit tensor of shape `[B, H]` *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. 
    Also note that in PyTorch, this defaults to `True`

    """

    def forward(self, inputs):
        """
        :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]`
        :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state
        """
        tbc, lengths = inputs
        packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths.cpu(), batch_first=self.batch_first)
        output, hidden = self.rnn(packed)
        output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first)
        # Concatenate fwd/bwd directions, then keep only the topmost layer's state
        return self.extract_top_state(_cat_dir(hidden))


class Reduction(nn.Module):
    """Base for modules that combine a list of tensors into a single tensor."""

    def __init__(self):
        super().__init__()

    def forward(self, inputs: List[torch.Tensor]) -> torch.Tensor:
        # Subclasses implement the actual reduction
        pass

    def set_output_dim(self, output_dims: List[int]):
        # Subclasses record their resulting feature dim from the per-input dims
        pass


class ConcatReduction(Reduction):
    """Reduce by concatenating the inputs along an axis (the feature axis by default)."""

    def __init__(self, output_dims: List[int], axis=-1, **kwargs):
        super().__init__()
        self.axis = axis
        self.set_output_dim(output_dims)

    def set_output_dim(self, output_dims: List[int]):
        # Concatenation sums the per-input feature dims
        self.output_dim = sum(output_dims)

    def forward(self, inputs: List[torch.Tensor]) -> torch.Tensor:
        return torch.cat(inputs, self.axis)


class ConcatSubtractReduction(Reduction):
    """This reduction assumes paired input and subtracts the two to get a distance

    It is useful for training sentence encoders and is used, for example, in SentenceBERT
    For this to work we assume that the inputs are paired, and subtract them
    """

    def __init__(self, output_dims: List[int], axis=-1, **kwargs):
        super().__init__()
        self.axis = axis
        self.set_output_dim(output_dims)

    def set_output_dim(self, output_dims: List[int]):
        # Output is [u, v, |u - v|], so 3x a single input's dim
        self.output_dim = 3 * output_dims[0]

    def forward(self, inputs: List[torch.Tensor]) -> torch.Tensor:
        sub = torch.abs(inputs[0] - inputs[1])
        return torch.cat([inputs[0], inputs[1], sub], self.axis)


class SumReduction(Reduction):
    """Reduce by element-wise summing the inputs (which must share a feature dim)."""

    def __init__(self, output_dims: List[int], **kwargs):
        super().__init__()
        self.set_output_dim(output_dims)

    def set_output_dim(self, output_dims: List[int]):
        # We could actually project if we needed, or at
        # least should validate
        self.output_dim = output_dims[0]

    def forward(self, inputs: List[torch.Tensor]) -> torch.Tensor:
        return sum(inputs)


class SumLayerNormReduction(Reduction):
    """Sum the inputs element-wise and apply LayerNorm to the result."""

    def __init__(self, output_dims: List[int], layer_norm_eps: float = 1.0e-12, **kwargs):
        super().__init__()
        self.set_output_dim(output_dims)
        self.ln = nn.LayerNorm(self.output_dim, eps=layer_norm_eps)

    def set_output_dim(self, output_dims: List[int]):
        self.output_dim = output_dims[0]

    def forward(self, inputs: List[torch.Tensor]) -> torch.Tensor:
        output = sum(inputs)
        return self.ln(output)


class EmbeddingsStack(nn.Module):

    def __init__(
        self,
        embeddings_dict: Dict[str, nn.Embedding],
        dropout_rate: float = 0.0,
        requires_length: bool = False,
        reduction: Optional[Union[str, nn.Module]] = 'concat',
        **kwargs,
    ):
        """Takes in a dictionary where the keys are the input tensor names, and the values are the embeddings

        :param embeddings_dict: dictionary of each feature embedding
        :param dropout_rate: The dropout rate (0.0 means no dropout, 1.0 means complete)
        """
        super().__init__()
        self._keys: List[str] = []
        embeddings_list = []
        output_dims = []
        # Keep names and modules in parallel lists so lookup order is stable
        for k, embedding in embeddings_dict.items():
            embeddings_list.append(embedding)
            self._keys.append(k)
            output_dims += [embedding.get_dsz()]

        self.embeddings: nn.ModuleList = nn.ModuleList(embeddings_list)
        # TODO: should we make a registry of options?
if isinstance(reduction, str): if reduction == 'sum': self.reduction = SumReduction(output_dims) elif reduction == 'sum-layer-norm': self.reduction = SumLayerNormReduction(output_dims, layer_norm_eps=kwargs.get('layer_norm_eps', 1.0e-12)) elif reduction == 'concat-subtract': self.reduction = ConcatSubtractReduction(output_dims) else: self.reduction = ConcatReduction(output_dims) else: self.reduction = reduction self.reduction.set_output_dim(output_dims) self.dsz = self.reduction.output_dim self.dropout = nn.Dropout(dropout_rate) self.requires_length = requires_length def __getitem__(self, item: str) -> nn.Module: idx = self._keys.index(item) if idx < 0: raise Exception(f"Invalid item ({item})") return self.embeddings[idx] def forward(self, inputs: Dict[str, torch.Tensor]) -> torch.Tensor: """This method performs "embedding" of the inputs. The base method here then concatenates along depth dimension to form word embeddings :return: A 3-d vector where the last dimension is the concatenated dimensions of all embeddings """ all_embeddings_out = [] i = 0 for embedding in self.embeddings: k = self._keys[i] x = inputs[k] # Its a hair faster to do this than using isinstance if x.__class__ == tuple: embeddings_out = embedding(*x) else: embeddings_out = embedding(x) all_embeddings_out.append(embeddings_out) i += 1 word_embeddings = self.reduction(all_embeddings_out) return self.dropout(word_embeddings) def keys(self): return self._keys @property def output_dim(self): return self.dsz def items(self): for k, v in zip(self.keys(), self.embeddings): yield k, v class DenseStack(nn.Module): """A stack of one or more hidden layers """ def __init__( self, insz: int, hsz: Union[int, List[int]], activation: Union[str, List[str]] = "relu", pdrop_value: float = 0.5, init=None, skip_connect=False, layer_norm=False, **kwargs, ): """Stack 1 or more hidden layers, optionally (forming an MLP) :param insz: The number of input units :param hsz: The number of hidden units :param activation: The 
name of the activation function to use :param pdrop_value: The dropout probability :param init: The initializer :param skip_connect: whether use skip connection when insz is equal to outsz for a layer :param layer_norm: whether use layer norm in each layer """ super().__init__() hszs = listify(hsz) self.output_dim = hsz[-1] activations = listify(activation) if len(activations) == 1: activations = activations * len(hszs) if len(activations) != len(hszs): raise ValueError("Number of activations must match number of hidden sizes in a stack!") current = insz layer_stack = [] if layer_norm: layer_norm_eps = kwargs.get('layer_norm_eps', 1e-6) for hsz, activation in zip(hszs, activations): if skip_connect and current == hsz: layer = SkipConnection(current, activation) else: layer = Dense(current, hsz, activation) if layer_norm: layer = nn.Sequential(layer, nn.LayerNorm(hsz, eps=layer_norm_eps)) layer_stack.append(WithDropout(layer, pdrop_value)) current = hsz self.layer_stack = nn.Sequential(*layer_stack) self.requires_length = False def forward(self, inputs: torch.Tensor) -> torch.Tensor: """Stack 1 or more hidden layers, optionally (forming an MLP) :param inputs: The fixed representation of the model :Keyword Arguments: * *hsz* -- (``int``) The number of hidden units (defaults to `100`) :return: The final layer """ return self.layer_stack(inputs) class VectorSequenceAttention(nn.Module): def __init__(self, hsz: int): super().__init__() self.hsz = hsz self.W_c = nn.Linear(2 * self.hsz, hsz, bias=False) def forward(self, query_t, keys_bth, values_bth, keys_mask=None): # Output(t) = B x H x 1 # Keys = B x T x H # a = B x T x 1 a = self._attention(query_t, keys_bth, keys_mask) attended = self._update(a, query_t, values_bth) return attended def _attention(self, query_t, keys_bth, keys_mask): pass def _update(self, a, query_t, values_bth): # a = B x T # Want to apply over context, scaled by a # (B x 1 x T) (B x T x H) = (B x 1 x H) a = a.view(a.size(0), 1, a.size(1)) c_t = 
            torch.bmm(a, values_bth).squeeze(1)
        # Combine the attended context with the query and squash back to H
        attended = torch.cat([c_t, query_t], -1)
        attended = torch.tanh(self.W_c(attended))
        return attended


def dot_product_attention_weights(query_t: torch.Tensor,
                                  keys_bth: torch.Tensor,
                                  keys_mask: torch.Tensor) -> torch.Tensor:
    """Attention weights from a dot product of the query against each key, masked then softmaxed.

    :param query_t: A query vector `[B, H]`
    :param keys_bth: Keys `[B, T, H]`
    :param keys_mask: A mask `[B, T]` where `MASK_FALSE` marks padding
    :return: Normalized attention weights `[B, T]`
    """
    a = keys_bth @ query_t.unsqueeze(2)
    # Push masked positions toward -inf so softmax zeroes them out
    a = a.squeeze(2).masked_fill(keys_mask == MASK_FALSE, -1e9)
    a = F.softmax(a, dim=-1)
    return a


def dot_product_attention_weights_lengths(query_t: torch.Tensor,
                                          keys_bth: torch.Tensor,
                                          keys_lengths: torch.Tensor) -> torch.Tensor:
    """Like `dot_product_attention_weights` but builds the key mask from sequence lengths."""
    mask = sequence_mask(keys_lengths, keys_bth.shape[1]).to(keys_bth.device)
    return dot_product_attention_weights(query_t, keys_bth, mask)


class LuongDotProductAttention(VectorSequenceAttention):
    """Luong-style dot-product attention (no learned scoring parameters)."""

    def __init__(self, hsz):
        super().__init__(hsz)

    def _attention(self, query_t, keys_bth, keys_mask):
        return dot_product_attention_weights(query_t, keys_bth, keys_mask)


class ScaledDotProductAttention(VectorSequenceAttention):
    """Dot-product attention scaled by `1/sqrt(hsz)`."""

    def __init__(self, hsz):
        super().__init__(hsz)

    def _attention(self, query_t, keys_bth, keys_mask):
        a = (keys_bth @ query_t.unsqueeze(2)) / math.sqrt(self.hsz)
        a = a.squeeze(2).masked_fill(keys_mask == MASK_FALSE, -1e9)
        a = F.softmax(a, dim=-1)
        return a


class LuongGeneralAttention(VectorSequenceAttention):
    """Luong 'general' attention: a learned bilinear scoring of the query against the keys."""

    def __init__(self, hsz):
        super().__init__(hsz)
        self.W_a = nn.Linear(self.hsz, self.hsz, bias=False)

    def _attention(self, query_t, keys_bth, keys_mask):
        a = keys_bth @ self.W_a(query_t).unsqueeze(2)
        a = a.squeeze(2).masked_fill(keys_mask == MASK_FALSE, -1e9)
        a = F.softmax(a, dim=-1)
        return a


class BahdanauAttention(VectorSequenceAttention):
    """Additive (MLP-scored) attention with learned query/key projections."""

    def __init__(self, hsz):
        super().__init__(hsz)
        self.hsz = hsz
        self.W_a = nn.Linear(self.hsz, self.hsz, bias=False)
        self.E_a = nn.Linear(self.hsz, self.hsz, bias=False)
        self.v = nn.Linear(self.hsz, 1, bias=False)

    def _attention(self, query_t, keys_bth, keys_mask):
        B, T, H = keys_bth.shape
        q = self.W_a(query_t.view(-1, self.hsz)).view(B, 1, H)
        u = self.E_a(keys_bth).view(B, T, H)
        z = torch.tanh(q + u)
        a = \
            self.v(z.view(-1, self.hsz)).view(B, T)
        a = a.masked_fill(keys_mask == MASK_FALSE, -1e9)
        a = F.softmax(a, dim=-1)
        return a

    def _update(self, a, query_t, values_bth):
        query_t = query_t.view(-1, self.hsz)
        # a = B x T
        # Want to apply over context, scaled by a
        # (B x 1 x T) (B x T x H) = (B x 1 x H) -> (B x H)
        a = a.view(a.size(0), 1, a.size(1))
        c_t = (a @ values_bth).squeeze(1)
        # (B x 2H)
        attended = torch.cat([c_t, query_t], -1)
        # NOTE: unlike the base class `_update`, no tanh is applied after the projection here
        attended = self.W_c(attended)
        return attended


class FineTuneModel(nn.Module):
    """Fine-tuning head: embed the features, optionally run a stacking model, then classify."""

    def __init__(self, nc, embeddings, stack_model=None):
        super().__init__()
        # A dict of embeddings gets wrapped into an EmbeddingsStack; a module is used directly
        if isinstance(embeddings, dict):
            self.finetuned = EmbeddingsStack(embeddings)
        else:
            self.finetuned = embeddings
        self.stack_model = stack_model
        output_dim = self.finetuned.output_dim if stack_model is None else stack_model.output_dim
        self.output_layer = Dense(output_dim, nc, activation="log_softmax")

    def forward(self, inputs):
        base_layers = self.finetuned(inputs)
        stacked = self.stack_model(base_layers) if self.stack_model is not None else base_layers
        return self.output_layer(stacked)


class CompositePooling(nn.Module):
    """Composite pooling allows for multiple sub-modules during pooling to be used in parallel
    """

    def __init__(self, models):
        """
        Note, this currently requires that each submodel is an eight_mile model with an `output_dim` attr
        """
        super().__init__()
        self.models = nn.ModuleList(models)
        # Parallel poolings are concatenated, so dims add up
        self.output_dim = sum(m.output_dim for m in self.models)
        # If any submodel needs lengths, the composite does too
        self.requires_length = any(getattr(m, "requires_length", False) for m in self.models)

    def forward(self, inputs):
        inputs, lengths = tensor_and_lengths(inputs)
        pooled = []
        for sub_model in self.models:
            # Only hand lengths to the submodels that declare they need them
            if getattr(sub_model, "requires_length", False):
                pooled.append(sub_model((inputs, lengths)))
            else:
                pooled.append(sub_model(inputs))
        return torch.cat(pooled, -1)


class EmbedPoolStackModel(nn.Module):
    """This provides an idiom for classification consisting of multiple phases

    In the first phase, we embed the input tensors, and subsequently pool them to a
    fixed width representation.  Finally, we allow multiple hidden "stacking" layers, ultimately ending in a
    projection to the output space
    """

    def __init__(
        self,
        nc: int,
        embeddings: nn.Module,
        pool_model: nn.Module,
        stack_model: Optional[nn.Module] = None,
        output_model: Optional[nn.Module] = None,
    ):
        super().__init__()
        self.embed_model = embeddings
        self.pool_model = pool_model
        self.stack_model = stack_model if stack_model else nn.Identity()
        output_dim = self.pool_model.output_dim if stack_model is None else stack_model.output_dim
        self.output_layer = Dense(output_dim, nc, activation="log_softmax") if output_model is None else output_model

    def forward(self, inputs: Dict[str, torch.Tensor]):
        # The embedding stack consumes the whole feature dict; lengths ride along for pooling
        lengths = inputs["lengths"]
        embedded = self.embed_model(inputs)
        embedded = (embedded, lengths)
        pooled = self.pool_model(embedded)
        stacked = self.stack_model(pooled)
        return self.output_layer(stacked)


class PassThru(nn.Module):
    """Identity layer that records its (unchanged) output dimension."""

    def __init__(self, input_dim):
        super().__init__()
        self.output_dim = input_dim

    def forward(self, inputs: torch.Tensor) -> torch.Tensor:
        return inputs


class WithoutLength(nn.Module):
    """Wrapper layer to remove lengths from the input
    """

    def __init__(self, layer: nn.Module):
        super().__init__()
        self.layer = layer
        self.output_dim = self.layer.output_dim if hasattr(self.layer, "output_dim") else 0

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        # Drop the lengths (second element) and forward only the tensor
        return self.layer(inputs[0])


class WithDropout(nn.Module):
    """Wrapper for any layer that surrounds it with dropout"""

    def __init__(self, layer: nn.Module, pdrop: float = 0.5, variational=False, batch_first=False):
        """Create a dropout wrapper around the given layer

        :param layer: Some sort of layer
        :param pdrop: A dropout value
        """
        super().__init__()
        self.layer = layer
        self.dropout = VariationalDropout(pdrop, batch_first=batch_first) if variational else nn.Dropout(pdrop)
        self.output_dim = self.layer.output_dim if hasattr(self.layer, "output_dim") else 0

    def forward(self, inputs: torch.Tensor) -> \
torch.Tensor: """Apply the layer followed by dropout :param inputs: input tensor :return: output transformed by the held layer and subsequent dropout """ return self.dropout(self.layer(inputs)) class WithDropoutOnFirst(nn.Module): """Wrapper for any layer that surrounds it with dropout This exists primarily for the LSTMEncoderWithState to allow dropout on the output while passing back the hidden state """ def __init__(self, layer: nn.Module, pdrop: float = 0.5, variational=False): """Create a dropout wrapper around the given layer :param layer: Some sort of layer :param pdrop: A dropout value """ super().__init__() self.layer = layer self.dropout = VariationalDropout(pdrop) if variational else nn.Dropout(pdrop) self.output_dim = self.layer.output_dim if hasattr(self.layer, "output_dim") else 0 def forward(self, inputs: Tuple[torch.Tensor]) -> torch.Tensor: """Apply the layer followed by dropout :param inputs: input tensor :return: output transformed by the held layer and subsequent dropout """ outputs = self.layer(inputs) return self.dropout(outputs[0]), outputs[1] def transition_mask(vocab, span_type, s_idx, e_idx, pad_idx=None): """Create a mask to enforce span sequence transition constraints. 
    Returns a Tensor with valid transitions as a 0 and invalid as a 1 for easy use with `masked_fill`
    """
    np_mask = transition_mask_np(vocab, span_type, s_idx, e_idx, pad_idx=pad_idx)
    # Invert the numpy mask (valid == 1) into a boolean tensor suitable for masked_fill
    return torch.from_numpy(np_mask) == 0


@torch.jit.script
def inplace_assign(data: torch.Tensor, index: torch.Tensor, new_data: torch.Tensor) -> torch.Tensor:
    """Scatter `new_data` into row `index` of `data` in place (JIT-scripted helper)."""
    new_data = new_data.unsqueeze(0)
    index = index.expand(1, new_data.size(1))
    data.scatter_(0, index, new_data)
    return data


@torch.jit.script
def i2t(i: int) -> torch.Tensor:
    """Wrap a python int as a 1-element LongTensor (JIT-scripted helper)."""
    return torch.tensor(i).unsqueeze(0)


@torch.jit.script
def script_viterbi(
    unary: torch.Tensor, trans: torch.Tensor, start_idx: int, end_idx: int
) -> Tuple[torch.Tensor, torch.Tensor]:
    """Viterbi decode for a single (unbatched) sequence of unary scores.

    :param unary: Emission scores `[T, N]`
    :param trans: Transition scores `[N, N]`, where `trans[i, j]` scores a transition j -> i
    :param start_idx: Index of the GO tag
    :param end_idx: Index of the EOS tag
    :return: The best path `[T]` and its score
    """
    seq_len: int = unary.size(0)
    num_tags: int = unary.size(1)
    fill_value: float = -1e4
    # dtype=unary.dtype fails, with prim_dtype error on torch 1.7.1
    alphas = torch.full((num_tags,), fill_value, dtype=torch.float, device=unary.device)
    # NOTE(review): broadcast_idx and the zeros below are created on the default device --
    # confirm behavior when `unary` lives on GPU
    broadcast_idx = torch.full((num_tags,), start_idx, dtype=torch.long)
    alphas = alphas.scatter(0, broadcast_idx, torch.zeros((num_tags,)))
    alphas = alphas.unsqueeze(0)

    backpointers: torch.Tensor = torch.zeros(num_tags, dtype=torch.long).unsqueeze(0)
    for i in range(seq_len):
        unary_t = unary[i, :]
        # max over previous tags of (alpha + transition) gives the new alphas and backpointers
        next_tag_var = alphas + trans
        viterbi, best_tag_ids = torch.max(next_tag_var, 1)
        backpointers = torch.cat([backpointers, best_tag_ids.unsqueeze(0)], 0)
        alphas = (viterbi + unary_t).unsqueeze(0)

    # Transition into the EOS tag terminates the lattice
    terminal_vars = alphas.squeeze(0) + trans[end_idx, :]
    path_score, best_tag_id = torch.max(terminal_vars, 0)
    best_path = best_tag_id.unsqueeze(0)

    # Walk the backpointers from the end to recover the best path
    for i in range(unary.size(0)):
        t = seq_len - i - 1
        best_tag_id = backpointers[t + 1, best_tag_id]
        best_path = torch.cat([best_path, best_tag_id.unsqueeze(0)], -1)

    new_path_vec = best_path.flip(0)
    # Drop the initial GO entry from the reversed path
    return new_path_vec[1:], path_score


class ViterbiBatchSize1(nn.Module):
    """Viterbi decode specialized for batch size 1; delegates to the scripted single-sequence decoder."""

    def __init__(self, start_idx: int, end_idx: int):
        super().__init__()
        self.start_idx = start_idx
        self.end_idx = end_idx

    def forward(self, unary: torch.Tensor, trans:
torch.Tensor, _: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: unary = unary.squeeze(1) trans = trans.squeeze(0) path, score = script_viterbi(unary, trans, self.start_idx, self.end_idx) return path.unsqueeze(1), score class Viterbi(nn.Module): def __init__(self, start_idx: int, end_idx: int): super().__init__() self.start_idx = start_idx self.end_idx = end_idx # r, start_idx: int, end_idx: int, norm = lambda x, y: x def forward( self, unary: torch.Tensor, trans: torch.Tensor, lengths: torch.Tensor ) -> Tuple[torch.Tensor, torch.Tensor]: """Do Viterbi decode on a batch. :param unary: torch.FloatTensor: [T, B, N] :param trans: torch.FloatTensor: [1, N, N] :param norm: Callable: This function should take the initial and a dim to normalize along. :return: torch.LongTensor: [T, B] the padded paths :return: torch.FloatTensor: [B] the path scores """ seq_len, batch_size, tag_size = unary.size() min_length = torch.min(lengths) backpointers = [] # Alphas: [B, 1, N] alphas = torch.full((batch_size, 1, tag_size), -1e4, device=unary.device) alphas[:, 0, self.start_idx] = 0 # alphas = self.norm(alphas) for i, unary_t in enumerate(unary): next_tag_var = alphas + trans viterbi, best_tag_ids = torch.max(next_tag_var, 2) backpointers.append(best_tag_ids) new_alphas = viterbi + unary_t new_alphas.unsqueeze_(1) # This part generates a warning if i >= min_length: mask = (i < lengths).view(-1, 1, 1) alphas = alphas.masked_fill(mask, 0) + new_alphas.masked_fill(mask == MASK_FALSE, 0) else: alphas = new_alphas # Add end tag terminal_var = alphas.squeeze(1) + trans[:, self.end_idx, :] path_score, best_tag_id = torch.max(terminal_var, 1) # Flip lengths rev_len = seq_len - lengths - 1 best_path = [best_tag_id] for i in range(len(backpointers)): t = len(backpointers) - i - 1 backpointer_t = backpointers[t] # Get new best tag candidate new_best_tag_id = backpointer_t.gather(1, best_tag_id.unsqueeze(1)).squeeze(1) # We are going backwards now, if flipped length was passed # these you 
aren't in your real results yet mask = i > rev_len best_tag_id = best_tag_id.masked_fill(mask, 0) + new_best_tag_id.masked_fill(mask == MASK_FALSE, 0) best_path.append(best_tag_id) _ = best_path.pop() best_path.reverse() best_path = torch.stack(best_path) # Mask out the extra tags (This might be pointless given thathatt anything that # will use this as a dense tensor downstream will mask it itself?) seq_mask = sequence_mask(lengths, seq_len).to(best_path.device).transpose(0, 1) best_path = best_path.masked_fill(seq_mask == MASK_FALSE, 0) return best_path, path_score @torch.jit.script def script_viterbi_log_softmax_norm( unary: torch.Tensor, trans: torch.Tensor, start_idx: int, end_idx: int ) -> Tuple[torch.Tensor, torch.Tensor]: seq_len: int = unary.size(0) num_tags: int = unary.size(1) fill_value: float = -1e4 # dtype=unary.dtype fails, with prim_dtype error on torch 1.7.1 alphas = torch.full((num_tags,), fill_value, dtype=torch.float, device=unary.device) broadcast_idx = torch.full((num_tags,), start_idx, dtype=torch.long) alphas = alphas.scatter(0, broadcast_idx, torch.zeros((num_tags,))) alphas = alphas.unsqueeze(0) alphas = torch.log(F.softmax(alphas, dim=-1)) backpointers: torch.Tensor = torch.zeros(num_tags, dtype=torch.long).unsqueeze(0) for i in range(seq_len): unary_t = unary[i, :] next_tag_var = alphas + trans viterbi, best_tag_ids = torch.max(next_tag_var, 1) backpointers = torch.cat([backpointers, best_tag_ids.unsqueeze(0)], 0) alphas = (viterbi + unary_t).unsqueeze(0) terminal_vars = alphas.squeeze(0) + trans[end_idx, :] path_score, best_tag_id = torch.max(terminal_vars, 0) best_path = best_tag_id.unsqueeze(0) for i in range(unary.size(0)): t = seq_len - i - 1 best_tag_id = backpointers[t + 1, best_tag_id] best_path = torch.cat([best_path, best_tag_id.unsqueeze(0)], -1) new_path_vec = best_path.flip(0) return new_path_vec[1:], path_score class ViterbiLogSoftmaxNormBatchSize1(nn.Module): def __init__(self, start_idx: int, end_idx: int): 
super().__init__() self.start_idx = start_idx self.end_idx = end_idx def forward(self, unary: torch.Tensor, trans: torch.Tensor, _: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: unary = unary.squeeze(1) trans = trans.squeeze(0) path, score = script_viterbi_log_softmax_norm(unary, trans, self.start_idx, self.end_idx) return path.unsqueeze(1), score class ViterbiLogSoftmaxNorm(Viterbi): def forward( self, unary: torch.Tensor, trans: torch.Tensor, lengths: torch.Tensor ) -> Tuple[torch.Tensor, torch.Tensor]: """Do Viterbi decode on a batch. :param unary: torch.FloatTensor: [T, B, N] :param trans: torch.FloatTensor: [1, N, N] :param norm: Callable: This function should take the initial and a dim to normalize along. :return: torch.LongTensor: [T, B] the padded paths :return: torch.FloatTensor: [B] the path scores """ seq_len, batch_size, tag_size = unary.size() min_length = torch.min(lengths) backpointers = [] # Alphas: [B, 1, N] alphas = torch.full((batch_size, 1, tag_size), -1e4, device=unary.device) alphas[:, 0, self.start_idx] = 0 alphas = F.log_softmax(alphas, dim=-1) for i, unary_t in enumerate(unary): next_tag_var = alphas + trans viterbi, best_tag_ids = torch.max(next_tag_var, 2) backpointers.append(best_tag_ids) new_alphas = viterbi + unary_t new_alphas.unsqueeze_(1) if i >= min_length: mask = (i < lengths).view(-1, 1, 1) alphas = alphas.masked_fill(mask, 0) + new_alphas.masked_fill(mask == MASK_FALSE, 0) else: alphas = new_alphas # Add end tag terminal_var = alphas.squeeze(1) + trans[:, self.end_idx, :] path_score, best_tag_id = torch.max(terminal_var, 1) # Flip lengths rev_len = seq_len - lengths - 1 best_path = [best_tag_id] for i in range(len(backpointers)): t = len(backpointers) - i - 1 backpointer_t = backpointers[t] # Get new best tag candidate new_best_tag_id = backpointer_t.gather(1, best_tag_id.unsqueeze(1)).squeeze(1) # We are going backwards now, if flipped length was passed # these you aren't in your real results yet mask = i > rev_len 
best_tag_id = best_tag_id.masked_fill(mask, 0) + new_best_tag_id.masked_fill(mask == MASK_FALSE, 0) best_path.append(best_tag_id) _ = best_path.pop() best_path.reverse() best_path = torch.stack(best_path) # Mask out the extra tags (This might be pointless given that anything that # will use this as a dense tensor downstream will mask it itself?) seq_mask = sequence_mask(lengths, seq_len).to(best_path.device).transpose(0, 1) best_path = best_path.masked_fill(seq_mask == MASK_FALSE, 0) return best_path, path_score def ident(x): return x class TaggerGreedyDecoder(nn.Module): def __init__( self, num_tags: int, constraint_mask: Optional[torch.Tensor] = None, batch_first: bool = True, reduction: str = "batch", ): """A Greedy decoder and loss module for taggers. :param num_tags: `int` The number of output classes :param constraint_mask: `Tensor[1, N, N]` A mask with valid transitions as 1 and invalid as 0 :param batch_first: `bool` Should the batch dimensions be first? :param reduction: `str` Should the loss be calculated at the token level or batch level """ super().__init__() self.num_tags = num_tags if constraint_mask is not None: constraint_mask = F.log_softmax( torch.zeros(constraint_mask.shape).masked_fill(constraint_mask, -1e4), dim=1 ) self.register_buffer("constraint_mask", constraint_mask) else: self.constraint_mask = None # FIXME: we cant do it like this if using TorchScript self.to_batch_first = ident if batch_first else tbh2bth self.to_time_first = bth2tbh if batch_first else ident self.batch_first = batch_first self.loss = SequenceLoss(LossFn=nn.CrossEntropyLoss, avg=reduction) self.viterbi = ViterbiLogSoftmaxNorm(Offsets.GO, Offsets.EOS) @property def transitions(self): return self.constraint_mask def neg_log_loss(self, inputs, tags, lengths): unaries = self.to_batch_first(inputs) tags = self.to_batch_first(tags) return self.loss(unaries, tags) def forward(self, inputs) -> torch.Tensor: unaries, lengths = tensor_and_lengths(inputs) # If there is a 
constraint mask do a masked viterbi if self.constraint_mask is not None: probv = self.to_time_first(unaries) probv = F.log_softmax(probv, dim=-1) preds, scores = self.viterbi(probv, self.constraint_mask, lengths) if self.batch_first: return tbh2bth(preds) # , scores else: return preds else: # Decoding doesn't care about batch/time first _, preds = torch.max(unaries, -1) mask = sequence_mask(lengths, unaries.shape[1]).to(preds.device) # The mask gets generated as batch first mask = mask if self.batch_first else mask.transpose(0, 1) preds = preds.masked_fill(mask == MASK_FALSE, 0) return preds # , None def extra_repr(self) -> str: str_ = f"n_tags={self.num_tags}, batch_first={self.batch_first}" if self.constraint_mask is not None: str_ += ", constrained=True" return str_ class CRF(nn.Module): def __init__( self, num_tags: int, constraint_mask: Optional[torch.Tensor] = None, batch_first: bool = True, idxs: Tuple[int, int] = (Offsets.GO, Offsets.EOS), ): """Initialize the object. :param num_tags: int, The number of tags in your output (emission size) :param constraint: torch.ByteTensor, Constraints on the transitions [1, N, N] :param idxs: Tuple(int. int), The index of the start and stop symbol in emissions. :param batch_first: bool, if the input [B, T, ...] or [T, B, ...] Note: if idxs is none then the CRF adds these symbols to the emission vectors and n_tags is assumed to be the number of output tags. if idxs is not none then the first element is assumed to be the start index and the second idx is assumed to be the end index. In this case n_tags is assumed to include the start and end symbols. 
""" super().__init__() self.start_idx, self.end_idx = idxs self.num_tags = num_tags if constraint_mask is not None: self.register_buffer("constraint_mask", constraint_mask) else: self.constraint_mask = None self.transitions_p = nn.Parameter(torch.Tensor(1, self.num_tags, self.num_tags).zero_()) self.batch_first = batch_first self.viterbi = Viterbi(self.start_idx, self.end_idx) def extra_repr(self) -> str: str_ = "n_tags=%d, batch_first=%s" % (self.num_tags, self.batch_first) if self.constraint_mask is not None: str_ += ", constrained=True" return str_ @property def transitions(self): if self.constraint_mask is not None: return self.transitions_p.masked_fill(self.constraint_mask, -1e4) return self.transitions_p def neg_log_loss(self, unary, tags, lengths): """Neg Log Loss with a Batched CRF. :param unary: torch.FloatTensor: [T, B, N] or [B, T, N] :param tags: torch.LongTensor: [T, B] or [B, T] :param lengths: torch.LongTensor: [B] :return: torch.FloatTensor: [B] """ # Convert from [B, T, N] -> [T, B, N] if self.batch_first: unary = unary.transpose(0, 1) tags = tags.transpose(0, 1) _, batch_size, _ = unary.size() fwd_score = self._forward_alg(unary, lengths) gold_score = self.score_sentence(unary, tags, lengths) loss = fwd_score - gold_score batch_loss = torch.mean(loss) return batch_loss def score_sentence(self, unary: torch.Tensor, tags: torch.Tensor, lengths: torch.Tensor) -> torch.Tensor: """Score a batch of sentences. 
:param unary: torch.FloatTensor: [T, B, N] :param tags: torch.LongTensor: [T, B] :param lengths: torch.LongTensor: [B] :param min_length: torch.LongTensor: [] :return: torch.FloatTensor: [B] """ batch_size = lengths.shape[0] assert lengths.shape[0] == unary.shape[1] trans = self.transitions.squeeze(0) # [N, N] start = torch.full((1, batch_size), self.start_idx, dtype=tags.dtype, device=tags.device) # [1, B] tags = torch.cat([start, tags], 0) # [T + 1, B] # Unfold gives me all slices of size 2 (this tag next tag) from dimension T tag_pairs = tags.unfold(0, 2, 1) # Move the pair dim to the front and split it into two indices = tag_pairs.permute(2, 0, 1).chunk(2) trans_score = trans[[indices[1], indices[0]]].squeeze(0) # Pull out the values of the tags from the unary scores. unary_score = unary.gather(2, tags[1:].unsqueeze(-1)).squeeze(-1) mask = sequence_mask(lengths).transpose(0, 1).to(tags.device) scores = unary_score + trans_score scores = scores.masked_fill(mask == MASK_FALSE, 0) scores = scores.sum(0) eos_scores = trans[self.end_idx, tags.gather(0, lengths.unsqueeze(0)).squeeze(0)] scores = scores + eos_scores return scores def _forward_alg(self, unary: torch.Tensor, lengths: torch.Tensor) -> torch.Tensor: """For CRF forward on a batch. 
:param unary: torch.FloatTensor: [T, B, N] :param lengths: torch.LongTensor: [B] :return: torch.FloatTensor: [B] """ # alphas: [B, 1, N] min_length = torch.min(lengths) batch_size = lengths.shape[0] lengths.shape[0] == unary.shape[1] alphas = torch.full((batch_size, 1, self.num_tags), -1e4, device=unary.device) alphas[:, 0, self.start_idx] = 0.0 # alphas.requires_grad = True trans = self.transitions # [1, N, N] for i, unary_t in enumerate(unary): # unary_t: [B, N] unary_t = unary_t.unsqueeze(2) # [B, N, 1] # Broadcast alphas along the rows of trans # Broadcast trans along the batch of alphas # [B, 1, N] + [1, N, N] -> [B, N, N] # Broadcast unary_t along the cols of result # [B, N, N] + [B, N, 1] -> [B, N, N] scores = alphas + trans + unary_t new_alphas = vec_log_sum_exp(scores, 2).transpose(1, 2) # If we haven't reached your length zero out old alpha and take new one. # If we are past your length, zero out new_alpha and keep old one. if i >= min_length: mask = (i < lengths).view(-1, 1, 1) alphas = alphas.masked_fill(mask, 0) + new_alphas.masked_fill(mask == MASK_FALSE, 0) else: alphas = new_alphas terminal_vars = alphas + trans[:, self.end_idx] alphas = vec_log_sum_exp(terminal_vars, 2) return alphas.view(batch_size) def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: unary, lengths = inputs if self.training: if self.batch_first: unary = unary.transpose(0, 1) forward = self._forward_alg(unary, lengths) # if self.batch_first: # forward = forward.transpose(0, 1) return forward with torch.no_grad(): return self.decode(unary, lengths)[0] @jit.export def decode(self, unary: torch.Tensor, lengths: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: """Do Viterbi decode on a batch. 
:param unary: torch.FloatTensor: [T, B, N] or [B, T, N] :param lengths: torch.LongTensor: [B] :return: torch.LongTensor: [B] the paths :return: torch.FloatTensor: [B] the path score """ if self.batch_first: unary = unary.transpose(0, 1) trans = self.transitions # [1, N, N] path, score = self.viterbi(unary, trans, lengths) if self.batch_first: path = path.transpose(0, 1) return path, score class SequenceModel(nn.Module): def __init__(self, nc: int, embeddings: nn.Module, transducer: nn.Module, decoder: Optional[nn.Module] = None): super().__init__() self.embed_model = embeddings self.transducer_model = transducer # TODO: make this a separate model! if transducer.output_dim != nc: self.proj_layer = Dense(transducer.output_dim, nc) else: self.proj_layer = nn.Identity() self.decoder_model = decoder def transduce(self, inputs: Dict[str, torch.Tensor]) -> torch.Tensor: lengths = inputs["lengths"] embedded = self.embed_model(inputs) embedded = (embedded, lengths) # transduced = self.transducer_model(embedded) transduced = self.proj_layer(self.transducer_model(embedded)) return transduced def decode(self, transduced: torch.Tensor, lengths: torch.Tensor) -> torch.Tensor: return self.decoder_model((transduced, lengths)) def forward(self, inputs: Dict[str, torch.Tensor]) -> torch.Tensor: pass class TagSequenceModel(SequenceModel): def __init__(self, nc: int, embeddings: nn.Module, transducer: nn.Module, decoder: Optional[nn.Module] = None): decoder_model = CRF(nc, batch_first=True) if decoder is None else decoder super().__init__(nc, embeddings, transducer, decoder_model) def neg_log_loss(self, unary: torch.Tensor, tags: torch.Tensor, lengths: torch.Tensor) -> torch.Tensor: return self.decoder_model.neg_log_loss(unary, tags, lengths) def forward(self, inputs: Dict[str, torch.Tensor]) -> torch.Tensor: transduced = self.transduce(inputs) path = self.decode(transduced, inputs["lengths"]) return path class LangSequenceModel(nn.Module): def __init__( self, nc: int, embeddings: 
nn.Module, transducer: nn.Module, decoder: Optional[nn.Module] = None, name: Optional[str] = None, ): super().__init__() self.embed_model = embeddings self.transducer_model = transducer if hasattr(transducer, "requires_state") and transducer.requires_state: self._call = self._call_with_state self.requires_state = True else: self._call = self._call_without_state self.requires_state = False self.output_layer = nn.Linear(self.transducer_model.output_dim, nc) self.decoder_model = decoder def forward(self, inputs: Dict[str, torch.Tensor]) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: return self._call(inputs) def _call_with_state(self, inputs: Dict[str, torch.Tensor]) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: h = inputs["h"] embedded = self.embed_model(inputs) transduced, hidden = self.transducer_model((embedded, h)) transduced = self.output_layer(transduced) return transduced, hidden def _call_without_state(self, inputs: Dict[str, torch.Tensor]) -> Tuple[torch.Tensor, Optional[torch.Tensor]]: embedded = self.embed_model(inputs) transduced = self.transducer_model((embedded, None)) transduced = self.output_layer(transduced) return transduced, None def pytorch_embedding(weights: torch.Tensor, finetune: bool = True) -> nn.Embedding: """Creation function for making an nn.Embedding with the given weights :param weights: The weights to use :param finetune: Should we fine-tune the embeddings or freeze them """ lut = nn.Embedding(weights.shape[0], weights.shape[1], padding_idx=Offsets.PAD) del lut.weight lut.weight = nn.Parameter(torch.FloatTensor(weights), requires_grad=finetune) return lut def subsequent_mask(size: int): """ Creates a lower triangular mask to mask future :param size: Temporal length :return: A tensor of type `uint8` that is 1s along diagonals and below, zero o.w """ attn_shape = (1, 1, size, size) sub_mask = np.tril(np.ones(attn_shape)).astype("uint8") return torch.from_numpy(sub_mask) class SequenceSequenceAttention(nn.Module): def __init__(self, 
hsz: int = None, pdrop: float = 0.1, **kwargs): super().__init__() self.hsz = hsz self.dropout = nn.Dropout(pdrop) self.attn = None def forward(self, qkvm: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor: query, key, value, mask = qkvm a = self._attention(query, key, mask) self.attn = a a = self.dropout(a) return self._update(a, value) def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: pass def _update(self, a: torch.Tensor, value: torch.Tensor) -> torch.Tensor: """Attention weights are applied for each value, but in a series of efficient matrix operations. In the case of self-attention, the key and query (used to create the attention weights) and values are all low order projections of the same input. :param a: The attention weights [B, H, T_q, T_k] :param values: The values [B, H, T_k, D] :returns: A tensor of shape [B, H, T_q, D] """ return torch.matmul(a, value) class SeqScaledDotProductAttention(SequenceSequenceAttention): def __init__(self, pdrop: float = 0.1, **kwargs): super().__init__(pdrop=pdrop, **kwargs) def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: """Scaled dot product attention, as defined in https://arxiv.org/abs/1706.03762 We apply the query to the keys to receive our weights via softmax in a series of efficient matrix operations. In the case of self-attention the key and query are all low order projections of the same input. :param query: a query for alignment. 
Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: A tensor that is (BxHxTxT) """ # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) d_k = query.size(-1) scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k) if mask is not None: scores = scores.masked_fill(mask == MASK_FALSE, -1e9) # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k] return F.softmax(scores, dim=-1) class SeqScaledDotProductAttentionALiBi(SequenceSequenceAttention): def __init__(self, pdrop: float = 0.1, num_heads=None, **kwargs): super().__init__(pdrop=pdrop, **kwargs) self.num_heads = num_heads slopes = torch.tensor(get_alibi_slopes(self.num_heads)) self.register_buffer("slopes", slopes) def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: """Attention with Linear Biases, defined in https://arxiv.org/pdf/2108.12409.pdf :param query: a query for alignment. 
Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: A tensor that is (BxHxTxT) """ # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) d_k = query.size(-1) scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k) T_k = scores.shape[-1] T_q = scores.shape[-2] offsets = - torch.abs(torch.arange(T_q).view(-1, 1) - torch.arange(T_k).view(1, -1)).to(self.slopes.device) # [T_q, T_k] alibi = self.slopes.unsqueeze(-1).unsqueeze(-1) * offsets.unsqueeze(0) # [H, T_q, T_k] alibi = alibi.unsqueeze(0) # [1, H, T_q, T_k] scores += alibi if mask is not None: scores = scores.masked_fill(mask == MASK_FALSE, -1e9) # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k] return F.softmax(scores, dim=-1) class SeqScaledDotProductAttentionT5(SequenceSequenceAttention): def __init__(self, pdrop: float = 0.1, num_heads=None, bidirectional=True, num_buckets=32, max_distance=128, **kwargs): super().__init__(pdrop=pdrop, **kwargs) self.num_heads = num_heads self.bidirectional = bidirectional self.num_buckets = num_buckets self.max_distance = max_distance rel_embedding = torch.nn.init.kaiming_normal_(torch.empty((self.num_heads, self.num_buckets), dtype=torch.float), nonlinearity='linear') self.rel_embedding = nn.Parameter(rel_embedding, requires_grad=True) def _relative_position_bucket(self, relative_position): """Taken from https://github.com/tensorflow/mesh/blob/bbb6ce7917e2a8ef1f3dc6990fcacd4f3b075acd/mesh_tensorflow/transformer/transformer_layers.py#L1014 """ ret = 0 n = -relative_position num_buckets = self.num_buckets if self.bidirectional: num_buckets //= 2 ret += torch.lt(n, 0).to(dtype=torch.long) * num_buckets n = torch.abs(n).to(dtype=torch.long) else: n = torch.maximum(n, 0).to(dtype=torch.long) # now n is in the range [0, inf) max_exact = num_buckets // 2 is_small = torch.lt(n, max_exact) val_if_large = max_exact + ( 
torch.log(n.to(dtype=torch.float32) / max_exact) / math.log(self.max_distance / max_exact) * (num_buckets - max_exact)).to(dtype=torch.long) val_if_large = torch.minimum(val_if_large, torch.tensor(num_buckets - 1)) ret += torch.where(is_small, n, val_if_large) return ret def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: """Relative Attention described in https://arxiv.org/abs/1910.10683 :param query: a query for alignment. :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: A tensor that is (BxHxTxT) """ # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) d_k = query.size(-1) scores = torch.matmul(query, key.transpose(-2, -1)) / math.sqrt(d_k) T_k = scores.shape[-1] T_q = scores.shape[-2] memory_position = torch.arange(T_k).view(1, -1) query_position = torch.arange(T_q).view(-1, 1) relative_position = memory_position - query_position rp_bucket = self._relative_position_bucket(relative_position) relative_attention_bias = self.rel_embedding[:, rp_bucket] scores += relative_attention_bias if mask is not None: scores = scores.masked_fill(mask == MASK_FALSE, -1e9) # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k] return F.softmax(scores, dim=-1) class SeqDotProductAttention(SequenceSequenceAttention): def __init__(self, pdrop: float = 0.1, **kwargs): super().__init__(pdrop=pdrop, **kwargs) def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: scores = torch.matmul(query, key.transpose(-2, -1)) if mask is not None: scores = scores.masked_fill(mask == MASK_FALSE, -1e9) return F.softmax(scores, dim=-1) class SeqDotProductAttentionALiBi(SequenceSequenceAttention): def __init__(self, pdrop: float = 0.1, num_heads=None, **kwargs): super().__init__(pdrop=pdrop, **kwargs) self.num_heads = num_heads slopes = torch.tensor(get_alibi_slopes(self.num_heads)) 
self.register_buffer("slopes", slopes) def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: scores = torch.matmul(query, key.transpose(-2, -1)) T_k = scores.shape[-1] T_q = scores.shape[-2] offsets = - torch.abs(torch.arange(T_q).view(1, -1) - torch.arange(T_k).view(-1, 1)).to(self.slopes.device) # [T_q, T_k] alibi = self.slopes.unsqueeze(-1).unsqueeze(-1) * offsets.unsqueeze(0) # [H, T_q, T_k] alibi = alibi.unsqueeze(0) # [1, H, T_q, T_k] scores += alibi if mask is not None: scores = scores.masked_fill(mask == MASK_FALSE, -1e9) return F.softmax(scores, dim=-1) class SeqDotProductAttentionT5(SequenceSequenceAttention): def __init__(self, pdrop: float = 0.1, num_heads=None, bidirectional=True, num_buckets=32, max_distance=128, **kwargs): super().__init__(pdrop=pdrop, **kwargs) self.num_heads = num_heads self.bidirectional = bidirectional self.num_buckets = num_buckets self.max_distance = max_distance rel_embedding = torch.nn.init.kaiming_normal_(torch.empty((self.num_heads, self.num_buckets), dtype=torch.float), nonlinearity='linear') self.rel_embedding = nn.Parameter(rel_embedding, requires_grad=True) def _relative_position_bucket(self, relative_position): """Taken from https://github.com/tensorflow/mesh/blob/bbb6ce7917e2a8ef1f3dc6990fcacd4f3b075acd/mesh_tensorflow/transformer/transformer_layers.py#L1014 """ ret = 0 n = -relative_position num_buckets = self.num_buckets if self.bidirectional: num_buckets //= 2 ret += torch.lt(n, 0).to(dtype=torch.long) * num_buckets n = torch.abs(n).to(dtype=torch.long) else: n = torch.maximum(n, 0).to(dtype=torch.long) # now n is in the range [0, inf) max_exact = num_buckets // 2 is_small = torch.lt(n, max_exact) val_if_large = max_exact + ( torch.log(n.to(dtype=torch.float32) / max_exact) / math.log(self.max_distance / max_exact) * (num_buckets - max_exact)).to(dtype=torch.long) val_if_large = torch.minimum(val_if_large, torch.tensor(num_buckets - 1)) ret += 
torch.where(is_small, n, val_if_large) return ret def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: """Relative Attention described in https://arxiv.org/abs/1910.10683 :param query: a query for alignment. :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: A tensor that is (BxHxTxT) """ # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) scores = torch.matmul(query, key.transpose(-2, -1)) T_k = scores.shape[-1] T_q = scores.shape[-2] memory_position = torch.arange(T_k).view(1, -1) query_position = torch.arange(T_q).view(-1, 1) relative_position = memory_position - query_position rp_bucket = self._relative_position_bucket(relative_position) relative_attention_bias = self.rel_embedding[:, rp_bucket] scores += relative_attention_bias if mask is not None: scores = scores.masked_fill(mask == MASK_FALSE, -1e9) # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k] return F.softmax(scores, dim=-1) class SequenceSequenceRelativeAttention(nn.Module): """This form of attention is specified in Shaw et al 2018: https://www.aclweb.org/anthology/N18-2074.pdf """ def __init__(self, hsz: int = None, pdrop: float = 0.1, **kwargs): super().__init__() self.hsz = hsz self.dropout = nn.Dropout(pdrop) self.attn = None def forward( self, q_k_v_ek_ev_m: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor] ) -> torch.Tensor: """Take in a tuple of tensors corresponding to the query, key, value, edges_key, edges_value and mask variables :param q_k_v_ek_ev_m: A tuple consisting of query, key, value, `edges_key`, `edges_value` and `mask` respectively :return: An updated value Tensor """ query, key, value, edges_key, edges_value, mask = q_k_v_ek_ev_m a = self._attention(query, key, edges_key, mask) self.attn = a a = self.dropout(a) return self._update(a, value, edges_value) def _attention( self, query: torch.Tensor, key: 
torch.Tensor, edges_key: torch.Tensor, mask: Optional[torch.Tensor] = None ) -> torch.Tensor: pass def _update(self, a: torch.Tensor, value: torch.Tensor, edges_value: torch.Tensor) -> torch.Tensor: """Attention weights are applied for each value, but in a series of efficient matrix operations. In the case of self-attention, the key and query (used to create the attention weights) and values are all low order projections of the same input. :param a: The attention weights [B, H, T_q, T_k] :param value: The values [B, H, T_k, D] :param edge_value: The edge values [T_q, T_k, D] :returns: A tensor of shape [B, H, T, D] """ B, H, T_k, D = value.shape updated_values = torch.matmul(a, value) # [B, H, T_q, D] if edges_value is not None: a = a.view(B * H, -1, T_k).transpose(0, 1) # (T_q, BxH, T_k) t = torch.matmul(a, edges_value) # (T_q, BxH, D) update_edge_values = t.transpose(0, 1).view(B, H, -1, D) return updated_values + update_edge_values else: return updated_values class SeqScaledDotProductRelativeAttention(SequenceSequenceRelativeAttention): def __init__(self, pdrop: float = 0.1, **kwargs): super().__init__(pdrop=pdrop, **kwargs) def _attention( self, query: torch.Tensor, key: torch.Tensor, edges_key: torch.Tensor, mask: Optional[torch.Tensor] = None ) -> torch.Tensor: """Scaled dot product attention, as defined in https://arxiv.org/abs/1706.03762 We apply the query to the keys to receive our weights via softmax in a series of efficient matrix operations. In the case of self-attntion the key and query are all low order projections of the same input. :param query: a query for alignment. 
Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :param edges_key: a matrix of relative embeddings between each word in a sequence [T_q x T_k x D] :return: A tensor that is (B x H x T_q x T_k) """ B, H, T_q, d_k = query.shape # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) scores_qk = torch.matmul(query, key.transpose(-2, -1)) tbhd = query.reshape(B * H, T_q, d_k).transpose(0, 1) # [T_q, B*H, d_k] scores_qek = torch.matmul(tbhd, edges_key.transpose(-2, -1)) # [T_q, B*H, T_k] scores_qek = scores_qek.transpose(0, 1).view(B, H, T_q, -1) # [B, H, T_q, T_k] scores = (scores_qk + scores_qek) / math.sqrt(d_k) # only for cross-attention T_q != T_k. for such case, mask should be src_mask, which is a sequence_mask with # dimension [B, 1, 1, T_k], and will be broadcast to dim of scores: if mask is not None: scores = scores.masked_fill(mask == MASK_FALSE, -1e9) return F.softmax(scores, dim=-1) class SeqDotProductRelativeAttention(SequenceSequenceRelativeAttention): def __init__(self, pdrop: float = 0.1, **kwargs): super().__init__(pdrop=pdrop, **kwargs) def _attention( self, query: torch.Tensor, key: torch.Tensor, edges_key: torch.Tensor, mask: Optional[torch.Tensor] = None ) -> torch.Tensor: B, H, T_q, d_k = query.shape scores_qk = torch.matmul(query, key.transpose(-2, -1)) tbhd = query.reshape(B * H, T_q, d_k).transpose(0, 1) scores_qek = torch.matmul(tbhd, edges_key.transpose(-2, -1)) scores_qek = scores_qek.transpose(0, 1).view(B, H, T_q, -1) scores = scores_qk + scores_qek if mask is not None: scores = scores.masked_fill(mask == MASK_FALSE, -1e9) return F.softmax(scores, dim=-1) def unfold_tensor(tensor, dim, window_sz): """Unfold a tensor by applying a sliding window on a certain dimension with step 1 and padding of 0's. 
The window dimension is added as the last dimension :param tensor: the tensor to be unfolded, with shape [d_1, d_2, ..., T, ..., d_n] :param dim: the dimension along which unfolding is applied :param window_sz: sliding window size, need to be an odd number :return: the unfolded tensor with shape [d_1, d_2, ..., T, ..., d_n, window_sz] """ half_window = (window_sz - 1) // 2 if dim < 0: dim = len(tensor.shape) + dim # torch.nn.functional.pad apply backwardly from the last dimension padding = [0, 0] * (len(tensor.shape) - dim - 1) + [half_window, half_window] return F.pad(tensor, padding).unfold(dim, window_sz, 1) class SeqScaledWindowedRelativeAttention(SequenceSequenceRelativeAttention): """This class implements windowed relative attention, i.e. preventing attention beyond rpr_k. For efficiency, _attention and _update are implemented in a different way.""" def __init__(self, pdrop: float = 0.1, **kwargs): super().__init__(pdrop=pdrop, **kwargs) def _unfold_mask(self, mask, batchsz, rpr_k): """Transform mask into the unfolded format.""" window_sz = 2 * rpr_k + 1 T = mask.shape[3] if mask.shape[2] > 1: # mask is from a subsequent mask, with [1, 1, T, T] or [B, 1, T, T] logger.warning("Using subsequent mask with long sequence may cause OOM error.") mask = mask.expand(batchsz, 1, T, T) # expand sequence/subsequent mask into a uniform dim mask = F.pad(mask, [rpr_k, rpr_k]) # pad both sides with rpr_k, [B, 1, T, T + 2*rpr_k] seq = torch.arange(T + 2 * rpr_k) indices = seq.unfold(0, window_sz, 1) # indices of a sliding window, [T, W] indices = indices.unsqueeze(0).unsqueeze(0).expand(batchsz, 1, T, window_sz).to(mask.device) return torch.gather(mask, -1, indices) # [B, 1, T, W]): else: # mask is a sequence mask [B, 1, 1, T] unfolded = unfold_tensor(mask, dim=-1, window_sz=window_sz) # [B, 1, 1, T, W] return unfolded.squeeze(1) # [B, 1, T, W] def _attention( self, query: torch.Tensor, key: torch.Tensor, rpr_key: torch.Tensor, mask: Optional[torch.Tensor] = None ) -> 
torch.Tensor: """Implementation of attention considering RA masking: using torch.Tensor.unfold to create an extra dimension representing the sliding window. Then when applying matmul, Q, K, V share the same T dimension. :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :param rpr_key: tensor of the rpr_key embeddings [W, d_k] :return: A tensor that is [B, H, T, 1, W] to be matmul with values """ B, H, T, d_k = query.shape window_sz = rpr_key.shape[0] rpr_k = (window_sz - 1) // 2 query = query.unsqueeze(-2) # [B, H, T, 1, d_k] key = unfold_tensor(key, dim=2, window_sz=window_sz) # [B, H, T, d_k, W] rpr_key = rpr_key.transpose(0, 1).unsqueeze(0).unsqueeze(0).unsqueeze(0) # [1, 1, 1, d_k, W] scores_qk = torch.matmul(query, key) # [B, H, T, 1, W] scores_qrk = torch.matmul(query, rpr_key) # [B, H, T, 1, W] scores = (scores_qk + scores_qrk) / math.sqrt(d_k) if mask is not None: mask = self._unfold_mask(mask, B, rpr_k).unsqueeze(-2) # [B, 1, T, 1, W] scores = scores.masked_fill(mask == False, -1e9) return F.softmax(scores, dim=-1) def _update(self, a: torch.Tensor, value: torch.Tensor, rpr_value: torch.Tensor) -> torch.Tensor: # a has dim [B, H, T, 1, W] window_sz = a.shape[-1] value = unfold_tensor(value, dim=2, window_sz=window_sz).transpose(-1, -2) # [B, H, T, W, d_value] updated_values = torch.matmul(a, value) # [B, H, T, 1, d_value] if rpr_value is not None: rpr_value = rpr_value.unsqueeze(0).unsqueeze(0).unsqueeze(0) # [1, 1, 1, W, d_value] update_rpr_values = torch.matmul(a, rpr_value) # [B, H, T, 1, d_value] return (updated_values + update_rpr_values).squeeze(3) # [B, H, T, d_value] else: return updated_values.squeeze(3) class SeqBahdanauAttention(SequenceSequenceAttention): def __init__(self, hsz: int, pdrop: float = 0.1, **kwargs): super().__init__(hsz, pdrop=pdrop, **kwargs) self.V = 
pytorch_linear(self.hsz, 1, bias=False) def _attention(self, query: torch.Tensor, key: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor: # [B, H, T, 1, D] + [B, H, 1, T, D] = [B, H, T, T, D] additive = query.unsqueeze(-2) + key.unsqueeze(-3) non_linear = torch.tanh(additive) # [B, H, T, T, D] @ [D, 1] = [B, H, T, T, 1] scores = self.V(non_linear) # [B, H, T, T] scores = scores.squeeze(-1) return F.softmax(scores, dim=-1) class MultiHeadedAttention(nn.Module): """ Multi-headed attention from https://arxiv.org/abs/1706.03762 via http://nlp.seas.harvard.edu/2018/04/03/attention.html Multi-headed attention provides multiple looks of low-order projections K, Q and V using an attention function (specifically `scaled_dot_product_attention` in the paper. This allows multiple relationships to be illuminated via attention on different positional and representational information from each head. The number of heads `h` times the low-order projection dim `d_k` is equal to `d_model` (which is asserted upfront). This means that each weight matrix can be simply represented as a linear transformation from `d_model` to `d_model`, and partitioned into heads after the fact. Finally, an output projection is applied which brings the output space back to `d_model`, in preparation for the sub-sequent `FFN` sub-layer. There are 3 uses of multi-head attention in the Transformer. For encoder-decoder layers, the queries come from the previous decoder layer, and the memory keys come from the encoder. For encoder layers, the K, Q and V all come from the output of the previous layer of the encoder. 
And for self-attention in the decoder, K, Q and V all come from the decoder, but here it is masked to prevent using future values """ def __init__( self, num_heads: int, d_model: int, dropout: float = 0.1, scale: bool = False, d_k: Optional[int] = None, ra_type: Optional[str] = None, ): """Constructor for multi-headed attention :param h: The number of heads :param d_model: The model hidden size :param dropout (``float``): The amount of dropout to use :param scale: Should we scale the dot product attention :param d_k: The low-order project per head. This is normally `d_model // num_heads` unless set explicitly :param ra_type: If there is an attention bias term, that will be encapsulated in the attention computation """ super().__init__() if d_k is None: self.d_k = d_model // num_heads if d_model % num_heads != 0: raise Exception(f"d_model ({d_model}) must be evenly divisible by num_heads ({num_heads})") else: self.d_k = d_k self.h = num_heads # for multi-headed attention, w_V projects to h heads, each head has dim d_k; for single headed attention, w_V # project to 1 head with dim d_model if self.h > 1: self.d_value = self.d_k else: self.d_value = d_model self.w_Q = Dense(d_model, self.d_k * self.h) self.w_K = Dense(d_model, self.d_k * self.h) self.w_V = Dense(d_model, self.d_value * self.h) if self.h > 1: # w_O is not needed for single headed attention self.w_O = Dense(self.d_k * self.h, d_model) if scale: if ra_type == 'alibi': self.attn_fn = SeqScaledDotProductAttentionALiBi(dropout, num_heads=num_heads) elif ra_type == 't5': # TODO: pass through options self.attn_fn = SeqScaledDotProductAttentionT5(dropout, num_heads=num_heads) else: self.attn_fn = SeqScaledDotProductAttention(dropout) else: if ra_type == 'alibi': self.attn_fn = SeqDotProductAttentionALiBi(dropout, num_heads=num_heads) elif ra_type == 't5': # TODO: pass through options self.attn_fn = SeqDotProductAttentionT5(dropout, num_heads=num_heads) else: self.attn_fn = SeqDotProductAttention(dropout) 
        # Cached attention weights from the most recent forward pass (for introspection)
        self.attn = None

    def forward(self, qkvm: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """Low-order projections of query, key and value into multiple heads, then attention application and dropout

        :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D
        :param key: a set of keys from encoder or self
        :param value: a set of values from encoder or self
        :param mask: masking (for destination) to prevent seeing what we shouldnt
        :return: Multi-head attention output, result of attention application to sequence (B, T, d_model)
        """
        query, key, value, mask = qkvm
        batchsz = query.size(0)

        # Project then split into heads: (B, H, T, D)
        query = self.w_Q(query).view(batchsz, -1, self.h, self.d_k).transpose(1, 2)
        key = self.w_K(key).view(batchsz, -1, self.h, self.d_k).transpose(1, 2)
        value = self.w_V(value).view(batchsz, -1, self.h, self.d_value).transpose(1, 2)
        x = self.attn_fn((query, key, value, mask))
        # Stash the raw attention weights computed by the attention function
        self.attn = self.attn_fn.attn

        # Re-merge the heads: (B, T, H * d_value)
        x = x.transpose(1, 2).contiguous().view(batchsz, -1, self.h * self.d_value)
        if self.h > 1:
            return self.w_O(x)
        else:
            return x


class MultiHeadedRelativeAttention(nn.Module):
    """
    Multi-headed relative attention from Shaw et al 2018 (https://www.aclweb.org/anthology/N18-2074.pdf)

    This method follows the same approach of MultiHeadedAttention, but it computes Relative Position Representations (RPR)
    which are used as part of the attention computations.  To facilitate this, the model has its own internal embeddings
    lookup table, and it has an updated computation for both the attention weights and the application of those weights to
    follow them.
    """

    def __init__(
        self,
        num_heads: int,
        d_model: int,
        rpr_k: int,
        dropout: float = 0.1,
        scale: bool = False,
        d_k: Optional[int] = None,
        windowed_ra: bool = False,
        rpr_value_on: bool = True
    ):
        """Constructor for multi-headed attention

        :param num_heads: The number of heads
        :param d_model: The model hidden size
        :param rpr_k: distance within which relative positional embedding will be considered
        :param windowed_ra: whether prevent attention beyond rpr_k
        :param dropout (``float``): The amount of dropout to use
        :param scale: Should we scale the dot product attention
        :param d_k: The low-order project per head.  This is normally `d_model // num_heads` unless set explicitly
        """
        super().__init__()

        if d_k is None:
            self.d_k = d_model // num_heads
            if d_model % num_heads != 0:
                raise Exception(f"d_model ({d_model}) must be evenly divisible by num_heads ({num_heads})")
        else:
            self.d_k = d_k

        self.h = num_heads
        # for multi-headed attention, w_V projects to h heads, each head has dim d_k; for single headed attention, w_V
        # project to 1 head with dim d_model
        if self.h > 1:
            self.d_value = self.d_k
        else:
            self.d_value = d_model

        self.rpr_k = rpr_k
        self.rpr_value_on = rpr_value_on
        # Embedding table over relative offsets clipped to [-rpr_k, rpr_k], shifted to [0, 2*rpr_k]
        self.rpr_key = nn.Embedding(2 * rpr_k + 1, self.d_k)
        if self.rpr_value_on:
            self.rpr_value = nn.Embedding(2 * rpr_k + 1, self.d_value)
        self.windowed_ra = windowed_ra

        self.w_Q = Dense(d_model, self.d_k * self.h)
        self.w_K = Dense(d_model, self.d_k * self.h)
        self.w_V = Dense(d_model, self.d_value * self.h)
        if self.h > 1:  # w_O is not needed for single headed attention
            self.w_O = Dense(self.d_k * self.h, d_model)

        if scale:
            if windowed_ra:
                self.attn_fn = SeqScaledWindowedRelativeAttention(dropout)
            else:
                self.attn_fn = SeqScaledDotProductRelativeAttention(dropout)
        else:
            self.attn_fn = SeqDotProductRelativeAttention(dropout)
        # Cached attention weights from the most recent forward pass (for introspection)
        self.attn = None

    def make_rpr(self, q_len, k_len, device) -> Tuple[torch.Tensor, torch.Tensor]:
        """Create a matrix shifted by self.rpr_k and bounded between 0 and 2*self.rpr_k to provide 0-based indexing for embedding
        """
        q_seq = torch.arange(q_len).to(device)
        k_seq = torch.arange(k_len).to(device)
        window_len = 2 * self.rpr_k
        # Relative offset of each (query, key) pair, clipped into the embedding's index range
        edges = k_seq.view(1, -1) - q_seq.view(-1, 1) + self.rpr_k  # [q_len, k_len]
        edges = torch.clamp(edges, 0, window_len)
        if self.rpr_value_on:
            return self.rpr_key(edges), self.rpr_value(edges)  # [q_len, k_len, d_k]
        else:
            return self.rpr_key(edges), None

    def make_windowed_rpr(self, device):
        """Look up the full window of relative embeddings [2*rpr_k + 1] used by windowed attention."""
        window_len = 2 * self.rpr_k + 1
        window = torch.arange(window_len).to(device)
        if self.rpr_value_on:
            return self.rpr_key(window), self.rpr_value(window)
        else:
            return self.rpr_key(window), None

    def forward(self, qkvm: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """Low-order projections of query, key and value into multiple heads, then attention application and dropout

        :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D
        :param key: a set of keys from encoder or self
        :param value: a set of values from encoder or self
        :param mask: masking (for destination) to prevent seeing what we shouldnt
        :return: Multi-head attention output, result of attention application to sequence (B, T, d_model)
        """
        query, key, value, mask = qkvm
        batchsz = query.size(0)
        query_len = query.size(1)
        key_len = key.size(1)  # key and value have the same length, but query can have a different length

        # (B, H, T, D)
        query = self.w_Q(query).view(batchsz, -1, self.h, self.d_k).transpose(1, 2)
        key = self.w_K(key).view(batchsz, -1, self.h, self.d_k).transpose(1, 2)
        value = self.w_V(value).view(batchsz, -1, self.h, self.d_value).transpose(1, 2)

        if self.windowed_ra:
            rpr_key, rpr_value = self.make_windowed_rpr(query.device)
        else:
            rpr_key, rpr_value = self.make_rpr(query_len, key_len, query.device)
        x = self.attn_fn((query, key, value, rpr_key, rpr_value, mask))
        self.attn = self.attn_fn.attn

        # Re-merge the heads: (B, T, H * d_value)
        x = x.transpose(1, 2).contiguous().view(batchsz, -1, self.h * self.d_value)
        if self.h > 1:
            return self.w_O(x)
        else:
            return x
class TransformerEncoderBase(nn.Module):
    """Shared construction for a single Transformer encoder layer.

    Builds the self-attention sub-layer (relative attention when `rpr_k` is given, otherwise
    vanilla multi-head attention, optionally with an `ra_type` attention bias), the position-wise
    FFN and the two layer norms.  Subclasses differ only in where norms/residuals are applied.
    """

    def __init__(
        self,
        num_heads: int,
        d_model: int,
        pdrop: float,
        scale: bool = True,
        activation_type: str = "gelu",
        d_ff: Optional[int] = None,
        d_k: Optional[int] = None,
        rpr_k: Optional[int] = None,
        ffn_pdrop: Optional[float] = 0.0,
        layer_norm_eps: float = 1.0e-6,
        windowed_ra: Optional[bool] = False,
        rpr_value_on: bool = True,
        ra_type: Optional[str] = None,
        **kwargs,
    ):
        super().__init__()
        self.d_model = d_model
        # Conventional transformer FFN expansion is 4x the model dim unless given explicitly
        self.d_ff = d_ff if d_ff is not None else 4 * d_model
        if rpr_k is not None and rpr_k != 0:
            # Shaw-style relative position attention (optionally windowed to +/- rpr_k)
            self.self_attn = MultiHeadedRelativeAttention(num_heads, d_model, rpr_k, pdrop, scale, d_k=d_k,
                                                          windowed_ra=windowed_ra, rpr_value_on=rpr_value_on)
        else:
            self.self_attn = MultiHeadedAttention(num_heads, d_model, pdrop, scale=scale, d_k=d_k, ra_type=ra_type)
        self.ffn = nn.Sequential(
            Dense(self.d_model, self.d_ff),
            get_activation(activation_type),
            nn.Dropout(ffn_pdrop),
            Dense(self.d_ff, self.d_model),
        )
        self.ln1 = nn.LayerNorm(self.d_model, eps=layer_norm_eps)
        self.ln2 = nn.LayerNorm(self.d_model, eps=layer_norm_eps)
        self.dropout = nn.Dropout(pdrop)


class PreLNTransformerEncoder(TransformerEncoderBase):
    """Pre-layer-norm encoder layer: LN inside each residual branch (GPT-2 style)."""

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """
        :param inputs: `(x, mask)`
        :return: The output tensor
        """
        x, mask = inputs
        h = self.ln1(x)
        x = x + self.dropout(self.self_attn((h, h, h, mask)))
        x = x + self.dropout(self.ffn(self.ln2(x)))
        return x


class PreLNBeforeResConnTransformerEncoder(TransformerEncoderBase):
    """Variant that norms the residual stream itself before each sub-layer."""

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """
        :param inputs: `(x, mask)`
        :return: The output tensor
        """
        x, mask = inputs
        x = self.ln1(x)
        h = self.self_attn((x, x, x, mask))
        x = x + self.dropout(h)
        x = self.ln2(x)
        x = x + self.dropout(self.ffn(x))
        return x


class PostLNTransformerEncoder(TransformerEncoderBase):
    """Post-layer-norm encoder layer as in the original Transformer paper."""

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """
        :param inputs: `(x, mask)`
        :return: The output tensor
        """
        x, mask = inputs
        h = self.self_attn((x, x, x, mask))
        x = x + self.dropout(h)
        x = self.ln2(x)
        x = x + self.dropout(self.ffn(x))
        x = self.ln1(x)
        return x


class SpatialGatingUnit(nn.Module):
    """Spatial gating unit

    There are 2 ways we can look at this unit, as an MLP or a Conv with kernel length 1

    l = nn.Linear(T, T)
    c = nn.Conv1d(T, T, 1)
    l(x.transpose(1, 2)).transpose(1, 2)
    c(x)
    """

    def __init__(self, d_ffn: int, nctx: int, layer_norm_eps: float = 1.0e-6):
        super().__init__()
        self.norm = nn.LayerNorm(d_ffn // 2, eps=layer_norm_eps)
        # "spatial" projection mixes information across the time dimension
        self.proj = pytorch_conv1d(nctx, nctx, 1)
        # Bias init of 1 makes the gate start near-identity (per the gMLP paper)
        nn.init.constant_(self.proj.bias, 1.0)

    def split(self, x):
        # Split the channel dim in half: one half gates the other
        u, v = x.chunk(2, dim=-1)
        return u, v

    def forward(self, x):
        u, v = self.split(x)
        v = self.norm(v)
        v = self.proj(v)
        return u * v


class GatedMLPEncoder(nn.Module):
    """Following https://arxiv.org/pdf/2105.08050.pdf
    """

    def __init__(
        self,
        d_model: int,
        pdrop: float,
        nctx: int = 256,
        activation_type: str = "gelu",
        d_ff: Optional[int] = None,
        ffn_pdrop: Optional[float] = 0.0,
        layer_norm_eps: float = 1.0e-6
    ):
        super().__init__()
        self.d_model = d_model
        self.d_ff = d_ff if d_ff is not None else 4 * d_model
        self.to_ffn = Dense(self.d_model, self.d_ff)
        self.activation = get_activation(activation_type)
        self.ffn_drop = nn.Dropout(ffn_pdrop)
        # SGU halves the channel dim, so project back from d_ff//2
        self.from_sgu = Dense(self.d_ff//2, self.d_model)
        self.norm = nn.LayerNorm(self.d_model, eps=layer_norm_eps)
        self.dropout = nn.Dropout(pdrop)
        self.spatial_gating_unit = SpatialGatingUnit(self.d_ff, nctx, layer_norm_eps)

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """Do gMLP forward

        TODO: we arent using the mask ATM

        :param inputs: `(x, mask)`
        :return: The output tensor
        """
        # The shortcut here happens pretty early
        shortcut, mask = inputs
        # A "channel" norm
        x = self.norm(shortcut)
        # A "channel" FFN
        x = self.dropout(self.to_ffn(x))
        # gelu according to https://arxiv.org/pdf/2105.08050.pdf
        x = self.activation(x)
        # "spatial" projection (over T)
        x = self.spatial_gating_unit(x)
        # "channel" projection
        x = self.from_sgu(x)
        x = self.dropout(x)
        return x + shortcut


class TransformerDecoderBase(nn.Module):
    """Shared construction for a single Transformer decoder layer.

    Builds masked self-attention, encoder-decoder (source) attention, the position-wise FFN
    and three layer norms.  Subclasses differ only in where norms/residuals are applied.
    """

    def __init__(
        self,
        num_heads: int,
        d_model: int,
        pdrop: float,
        scale: bool = True,
        activation_type: str = "gelu",
        d_ff: Optional[int] = None,
        d_k: Optional[int] = None,
        rpr_k: Optional[int] = None,
        ffn_pdrop: Optional[float] = 0.0,
        layer_norm_eps: float = 1.0e-6,
        rpr_value_on: bool = True,
        ra_type: Optional[str] = None,
    ):
        super().__init__()
        self.d_model = d_model
        self.d_ff = d_ff if d_ff is not None else 4 * d_model
        if rpr_k is not None:
            self.self_attn = MultiHeadedRelativeAttention(num_heads, d_model, rpr_k, pdrop, scale, d_k=d_k,
                                                          rpr_value_on=rpr_value_on)
            self.src_attn = MultiHeadedRelativeAttention(num_heads, d_model, rpr_k, pdrop, scale, d_k=d_k,
                                                         rpr_value_on=rpr_value_on)
        else:
            self.self_attn = MultiHeadedAttention(num_heads, d_model, pdrop, scale, d_k=d_k, ra_type=ra_type)
            self.src_attn = MultiHeadedAttention(num_heads, d_model, pdrop, scale, d_k=d_k, ra_type=ra_type)

        self.ffn = nn.Sequential(
            Dense(self.d_model, self.d_ff),
            # NOTE(review): dropout is applied *before* the activation here, while the encoder
            # applies it after -- kept as-is to preserve existing training behavior.
            nn.Dropout(ffn_pdrop),
            get_activation(activation_type),
            Dense(self.d_ff, self.d_model),
        )
        self.ln1 = nn.LayerNorm(self.d_model, eps=layer_norm_eps)
        self.ln2 = nn.LayerNorm(self.d_model, eps=layer_norm_eps)
        self.ln3 = nn.LayerNorm(self.d_model, eps=layer_norm_eps)
        self.dropout = nn.Dropout(pdrop)


class PreLNTransformerDecoder(TransformerDecoderBase):
    """Pre-layer-norm decoder layer: LN inside each residual branch."""

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor:
        x, memory, src_mask, tgt_mask = inputs
        h = self.ln1(x)
        x = x + self.dropout(self.self_attn((h, h, h, tgt_mask)))

        h = self.ln2(x)
        x = x + self.dropout(self.src_attn((h, memory, memory, src_mask)))

        h = self.ln3(x)
        x = x + self.dropout(self.ffn(h))
        return x


class PreLNBeforeResConnTransformerDecoder(TransformerDecoderBase):
    """Variant that norms the residual stream itself before each sub-layer."""

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor:
        x, memory, src_mask, tgt_mask = inputs
        x = self.ln1(x)
        x = x + self.dropout(self.self_attn((x, x, x, tgt_mask)))

        x = self.ln2(x)
        x = x + self.dropout(self.src_attn((x, memory, memory, src_mask)))

        x = self.ln3(x)
        x = x + self.dropout(self.ffn(x))
        return x


class PostLNTransformerDecoder(TransformerDecoderBase):
    """Post-layer-norm decoder layer as in the original Transformer paper.

    FIX: previously subclassed `nn.Module` directly, so `forward` would fail on
    `self.dropout`/`self.self_attn`/`self.ln1` etc., which only `TransformerDecoderBase`
    constructs (mirrors `PostLNTransformerEncoder` extending its base).
    """

    def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor:
        x, memory, src_mask, tgt_mask = inputs
        x = x + self.dropout(self.self_attn((x, x, x, tgt_mask)))
        x = self.ln2(x)

        x = x + self.dropout(self.src_attn((x, memory, memory, src_mask)))
        x = self.ln3(x)

        x = x + self.dropout(self.ffn(x))
        x = self.ln1(x)
        return x


class TransformerEncoderStack(nn.Module):
    """A stack of Transformer encoder layers with optional LayerDrop and a final norm.

    `transformer_type` selects the layer variant ("post-layer-norm", "pre-layer-norm",
    otherwise the pre-LN-before-residual variant); `layer_norms_after` is a legacy alias
    for post-layer-norm.
    """

    def __init__(
        self,
        num_heads: int,
        d_model: int,
        pdrop: float,
        scale: bool = True,
        layers: int = 1,
        activation: str = "relu",
        d_ff: Optional[int] = None,
        d_k: Optional[int] = None,
        rpr_k: Optional[Union[int, List[int]]] = None,
        ffn_pdrop: Optional[float] = 0.0,
        layer_norms_after: bool = False,
        layer_norm_eps: float = 1.0e-6,
        windowed_ra: Optional[bool] = False,
        rpr_value_on: bool = True,
        layer_drop: float = 0.0,
        ra_type: Optional[str] = None,
        transformer_type: Optional[str] = None,  # FIX: default was `False`, contradicting the annotation
        **kwargs,
    ):
        super().__init__()
        self.encoders = nn.ModuleList()
        if layer_norms_after or transformer_type == "post-layer-norm":
            logger.info("Using post-layer-norm transformer (encoder)")
            TransformerEncoder = PostLNTransformerEncoder
            self.ln = nn.Identity()
        elif transformer_type == "pre-layer-norm":
            TransformerEncoder = PreLNTransformerEncoder
            self.ln = nn.LayerNorm(d_model, eps=layer_norm_eps)
        else:  # transformer_type == "pre-layer-norm-before-resconn"
            logger.info("Using layer norm before residual connections (encoder)")
            if layer_norms_after:
                raise Exception(f"Mutually exclusive options ({transformer_type}) and layer_norms_after=True)",)
            TransformerEncoder = PreLNBeforeResConnTransformerEncoder
            self.ln = nn.LayerNorm(d_model, eps=layer_norm_eps)

        self.output_dim = d_model
        self.layer_drop = layer_drop
        # Broadcast a scalar (or singleton) rpr_k to one value per layer
        if not is_sequence(rpr_k):
            rpr_k = [rpr_k]
transformer_type: Optional[str] = None, **kwargs, ): super().__init__(num_heads, d_model, pdrop, scale, layers, activation, d_ff, d_k, rpr_k, ffn_pdrop, layer_norms_after, layer_norm_eps, windowed_ra, rpr_value_on, layer_drop, ra_type, transformer_type, **kwargs) self.proj = WithDropout(pytorch_linear(input_sz, d_model), pdrop) def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: x, lengths = inputs x = self.proj(x) max_seqlen = x.shape[1] mask = sequence_mask(lengths, max_seqlen).to(x.device) return super().forward((x, mask.unsqueeze(1).unsqueeze(1))) class TransformerEncoderStackWithTimeMask(TransformerEncoderStack): def __init__( self, num_heads: int, d_model: int, pdrop: bool, scale: bool = True, layers: int = 1, activation: str = "relu", d_ff: Optional[int] = None, d_k: Optional[int] = None, rpr_k: Optional[Union[int, List[int]]] = None, input_sz: Optional[int] = None, ffn_pdrop: Optional[float] = 0.0, layer_norms_after: bool = False, layer_norm_eps: float = 1.0e-6, windowed_ra: Optional[bool] = False, rpr_value_on: bool = True, layer_drop: float = 0.0, ra_type: Optional[str] = None, transformer_type: Optional[str] = None, **kwargs, ): super().__init__(num_heads, d_model, pdrop, scale, layers, activation, d_ff, d_k, rpr_k, ffn_pdrop, layer_norms_after, layer_norm_eps, windowed_ra, rpr_value_on, layer_drop, ra_type, transformer_type, **kwargs) self.proj = WithDropout(pytorch_linear(input_sz, d_model), pdrop) def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor: x, lengths = inputs x = self.proj(x) max_seqlen = x.shape[1] mask = subsequent_mask(max_seqlen).to(x.device) return super().forward((x, mask.unsqueeze(1).unsqueeze(1))) class TransformerDecoderStack(nn.Module): def __init__( self, num_heads: int, d_model: int, pdrop: float, scale: bool = True, layers: int = 1, activation_type: str = "relu", d_ff: Optional[int] = None, d_k: Optional[int] = None, rpr_k: Optional[Union[int, List[int]]] = None, ffn_pdrop: 
Optional[float] = 0.0, layer_norms_after: bool = False, layer_norm_eps: float = 1.0e-6, layer_drop: float = 0.0, rpr_value_on: bool = True, ra_type: Optional[str] = None, transformer_type: Optional[str] = None, **kwargs, ): super().__init__() self.decoders = nn.ModuleList() self.layer_drop = layer_drop if layer_norms_after or transformer_type == "post-layer-norm": logger.info("Using post-layer-norm transformer (decoder)") TransformerDecoder = PostLNTransformerDecoder self.ln = nn.Identity() elif transformer_type == "pre-layer-norm": TransformerDecoder = PreLNTransformerDecoder self.ln = nn.LayerNorm(d_model, eps=layer_norm_eps) else: # transformer_type == "pre-layer-norm-before-resconn" logger.info("Using layer norm before residual connections (decoder)") if layer_norms_after: raise Exception(f"Mutually exclusive options ({transformer_type}) and layer_norms_after=True)",) TransformerDecoder = PreLNBeforeResConnTransformerDecoder self.ln = nn.LayerNorm(d_model, eps=layer_norm_eps) if not is_sequence(rpr_k): rpr_k = [rpr_k] * layers elif len(rpr_k) == 1: rpr_k = [rpr_k[0]] * layers for i in range(layers): self.decoders.append( TransformerDecoder(num_heads, d_model, pdrop, scale, activation_type, d_ff, d_k=d_k, rpr_k=rpr_k[i], ffn_pdrop=ffn_pdrop, layer_norm_eps=layer_norm_eps, rpr_value_on=rpr_value_on, ra_type=ra_type) ) def forward(self, inputs): x, memory, src_mask, tgt_mask = inputs for layer in self.decoders: pdrop = np.random.random() if not self.training or (pdrop >= self.layer_drop): x = layer((x, memory, src_mask, tgt_mask)) return self.ln(x) def update_lengths(lengths, eoses, idx): """Update the length of a generated tensor based on the first EOS found. This is useful for a decoding situation where tokens after an EOS can be something other than EOS. This also makes sure that a second generated EOS doesn't affect the lengths. :param lengths: `torch.LongTensor`: The lengths where zero means an unfinished sequence. 
:param eoses: `torch.ByteTensor`: A mask that has 1 for sequences that generated an EOS. :param idx: `int`: What value to fill the finished lengths with (normally the current decoding timestep). :returns: `torch.Tensor`: The updated lengths tensor (same shape and type). """ # If a length is 0 it has never had a length set so it is eligible to have # this EOS be the length. updatable_lengths = lengths == 0 # If this length can be updated AND this token is an eos lengths_mask = updatable_lengths & eoses return lengths.masked_fill(lengths_mask, idx) def gnmt_length_penalty(lengths, alpha=0.8): """Calculate a length penalty from https://arxiv.org/pdf/1609.08144.pdf The paper states the penalty as (5 + |Y|)^a / (5 + 1)^a. This is implemented as ((5 + |Y|) / 6)^a for a (very) tiny performance boost :param lengths: `torch.LongTensor`: [B, K] The lengths of the beams. :param alpha: `float`: A hyperparameter. See Table 2 for a search on this parameter. :returns: `torch.FloatTensor`: [B, K, 1] The penalties. """ lengths = lengths.to(torch.float) penalty = torch.pow(((5 + lengths) / 6), alpha) return penalty.unsqueeze(-1) def no_length_penalty(lengths): """A dummy function that returns a no penalty (1).""" return torch.ones_like(lengths).to(torch.float).unsqueeze(-1) def repeat_batch(t, K, dim=0): """Repeat a tensor while keeping the concept of a batch. :param t: `torch.Tensor`: The tensor to repeat. :param K: `int`: The number of times to repeat the tensor. :param dim: `int`: The dimension to repeat in. This should be the batch dimension. :returns: `torch.Tensor`: The repeated tensor. The new shape will be batch size * K at dim, the rest of the shapes will be the same. 
Example:: >>> a = torch.arange(10).view(2, -1) >>> a tensor([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) >>> a.repeat(2, 1) tensor([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) >>> repeat_batch(a, 2) tensor([[0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [5, 6, 7, 8, 9]]) """ shape = t.shape tiling = [1] * (len(shape) + 1) tiling[dim + 1] = K tiled = t.unsqueeze(dim + 1).repeat(tiling) old_bsz = shape[dim] new_bsz = old_bsz * K new_shape = list(shape[:dim]) + [new_bsz] + list(shape[dim + 1 :]) return tiled.view(new_shape) class BeamSearchBase: def __init__(self, beam=1, length_penalty=None, **kwargs): self.length_penalty = length_penalty if length_penalty else no_length_penalty self.K = beam def init(self, encoder_outputs): pass def step(self, paths, extra): pass def update(self, beams, extra): pass def __call__(self, encoder_outputs, **kwargs): """Perform batched Beam Search. Note: The paths and lengths generated do not include the <GO> token. :param encoder_outputs: `namedtuple` The outputs of the encoder class. :param init: `Callable(ecnoder_outputs: encoder_outputs, K: int)` -> Any: A callable that is called once at the start of the search to initialize things. This returns a blob that is passed to other callables. :param step: `Callable(paths: torch.LongTensor, extra) -> (probs: torch.FloatTensor, extra): A callable that is does a single decoding step. It returns the log probabilities over the vocabulary in the last dimension. It also returns any state the decoding process needs. :param update: `Callable(beams: torch.LongTensor, extra) -> extra: A callable that is called to edit the decoding state based on the selected best beams. :param length_penalty: `Callable(lengths: torch.LongTensor) -> torch.floatTensor A callable that generates a penalty based on the lengths. Lengths is [B, K] and the returned penalty should be [B, K, 1] (or [B, K, V] to have token based penalties?) :Keyword Arguments: * *beam* -- `int`: The number of beams to use. 
* *mxlen* -- `int`: The max number of steps to run the search for. :returns: tuple(preds: torch.LongTensor, lengths: torch.LongTensor, scores: torch.FloatTensor) preds: The predicted values: [B, K, max(lengths)] lengths: The length of each prediction [B, K] scores: The score of each path [B, K] """ mxlen = kwargs.get("mxlen", 100) bsz = encoder_outputs.output.shape[0] device = encoder_outputs.output.device with torch.no_grad(): extra = self.init(encoder_outputs) paths = torch.full((bsz, self.K, 1), Offsets.GO, dtype=torch.long, device=device) # This tracks the log prob of each beam. This is distinct from score which # is based on the log prob and penalties. log_probs = torch.zeros((bsz, self.K), dtype=torch.float, device=device) # Tracks the lengths of the beams, unfinished beams have lengths of zero. lengths = torch.zeros((bsz, self.K), dtype=torch.long, device=device) for i in range(mxlen - 1): probs, extra = self.step(paths, extra) V = probs.shape[-1] probs = probs.view((bsz, self.K, V)) # [B, K, V] if i > 0: # This mask is for all beams that are done. done_mask = (lengths != 0).unsqueeze(-1) # [B, K, 1] # Can creating this mask be moved out of the loop? It never changes but we don't have V # This mask selects the EOS token eos_mask = torch.zeros((1, 1, V), dtype=done_mask.dtype, device=device) eos_mask[:, :, Offsets.EOS] = 1 # This mask selects the EOS token of only the beams that are done. mask = done_mask & eos_mask # Put all probability mass on the EOS token for finished beams. # Otherwise as the other beams get longer they will all give # up and eventually select this beam and all outputs become # the same. probs = probs.masked_fill(done_mask, -np.inf) probs = probs.masked_fill(mask, 0) probs = log_probs.unsqueeze(-1) + probs # [B, K, V] # Calculate the score of the beam based on the current length. path_scores = probs / self.length_penalty(lengths.masked_fill(lengths == 0, i + 1)) else: # On the first step we only look at probabilities for the first beam. 
# If we don't then the probs will be the same for each beam # This means the same token will be selected for each beam # And we won't get any diversity. # Using only the first beam ensures K different starting points. path_scores = probs[:, 0, :] flat_scores = path_scores.view(bsz, -1) # [B, K * V] best_scores, best_idx = flat_scores.topk(self.K, 1) # Get the log_probs of the best scoring beams log_probs = probs.view(bsz, -1).gather(1, best_idx).view(bsz, self.K) best_beams = best_idx // V # Get which beam it came from best_idx = best_idx % V # Get the index of the word regardless of which beam it is. # Best Beam index is relative within the batch (only [0, K)). # This makes the index global (e.g. best beams for the second # batch example is in [K, 2*K)). offsets = torch.arange(bsz, dtype=torch.long, device=device) * self.K offset_beams = best_beams + offsets.unsqueeze(-1) flat_beams = offset_beams.view(bsz * self.K) # Select the paths to extend based on the best beams flat_paths = paths.view(bsz * self.K, -1) new_paths = flat_paths[flat_beams, :].view(bsz, self.K, -1) # Add the selected outputs to the paths paths = torch.cat([new_paths, best_idx.unsqueeze(-1)], dim=2) # Select the lengths to keep tracking based on the valid beams left. lengths = lengths.view(-1)[flat_beams].view((bsz, self.K)) extra = self.update(flat_beams, extra) # Updated lengths based on if we hit EOS last = paths[:, :, -1] eoses = last == Offsets.EOS lengths = update_lengths(lengths, eoses, i + 1) if (lengths != 0).all(): break else: # This runs if the loop didn't break meaning one beam hit the max len # Add an EOS to anything that hasn't hit the end. This makes the scores real. 
probs, extra = self.step(paths, extra) V = probs.size(-1) probs = probs.view((bsz, self.K, V)) probs = probs[:, :, Offsets.EOS] # Select the score of EOS # If any of the beams are done mask out the score of this EOS (they already had an EOS) probs = probs.masked_fill((lengths != 0), 0) log_probs = log_probs + probs end_tokens = torch.full((bsz, self.K, 1), Offsets.EOS, device=device, dtype=paths.dtype) paths = torch.cat([paths, end_tokens], dim=2) lengths = update_lengths(lengths, torch.ones_like(lengths) == 1, mxlen) lengths = update_lengths(lengths, torch.ones_like(lengths) == 1, mxlen) best_scores = log_probs / self.length_penalty(lengths).squeeze(-1) # Slice off the Offsets.GO token paths = paths[:, :, 1:] return paths, lengths, best_scores def checkpoint_for(model_base, epoch, tick_type='epoch'): return '{}-{}-{}'.format(model_base, tick_type, epoch+1) def rm_old_checkpoints(base_path, current_epoch, last_n=10): for i in range(0, current_epoch-last_n): checkpoint_i = checkpoint_for(base_path, i) for extension in ('.pth', '.npz'): checkpoint_name = checkpoint_i + extension if os.path.exists(checkpoint_name): os.remove(checkpoint_name) def find_latest_checkpoint(checkpoint_dir: str, wildcard="checkpoint") -> Tuple[str, int]: step_num = 0 for f in glob.glob(os.path.join(checkpoint_dir, f"{wildcard}*")): base = os.path.basename(f) if "-" not in base: continue last = base.split("-")[-1] for x in ('.pth', '.npz'): last = last.replace(x, '', -1) this_step_num = int(last) if this_step_num > step_num: checkpoint = f step_num = this_step_num return checkpoint, step_num def save_checkpoint(model: torch.nn.Module, model_base: str, count: int, tick_type: str = 'epoch', save_npz: bool = False): from eight_mile.pytorch.serialize import save_tlm_npz, save_tlm_output_npz, save_transformer_seq2seq_npz, save_transformer_de_npz checkpoint_name = checkpoint_for(model_base, count, tick_type=tick_type) # Its possible due to how its called that we might save the same checkpoint twice 
if we dont check first if os.path.exists(checkpoint_name): logger.info("Checkpoint already exists: %s", checkpoint_name) return logger.info("Creating checkpoint: %s", checkpoint_name) model_ = model.module if hasattr(model, 'module') else model torch.save(model_.state_dict(), checkpoint_name+'.pth') if save_npz: if hasattr(model_, 'decoder'): save_transformer_seq2seq_npz(model_, checkpoint_name+'.npz') elif hasattr(model_, 'reduction_layer'): save_transformer_de_npz(model_, checkpoint_name+'.npz') elif hasattr(model_, 'output_layer'): save_tlm_output_npz(model_, checkpoint_name+'.npz') else: save_tlm_npz(model_, checkpoint_name+'.npz') if tick_type == 'epoch': rm_old_checkpoints(model_base, count) def init_distributed(local_rank): if local_rank == -1: # https://github.com/kubeflow/pytorch-operator/issues/128 # https://github.com/pytorch/examples/blob/master/imagenet/main.py logger.info("Setting local rank to RANK env variable") local_rank = int(os.environ['RANK']) logger.warning("Local rank (%d)", local_rank) # In an env like k8s with kubeflow each worker will only see a single gpu # with an id of 0. If the gpu count is 1 then we are probably in an env like # that so we should just use the first (and only) gpu avaiable if torch.cuda.device_count() == 1: torch.cuda.set_device(0) device = torch.device("cuda", 0) # This program assumes multiprocess/multi-device on a single node. Each # process gets a rank (via cli or ENV variable) and uses that rank to select # which gpu to use. This only makes sense on a single node, if you had 4 # processes on 2 nodes where each node has 2 GPUs then the ranks would be # 0, 1, 2, 3 but the gpus numbers would be node 0: 0, 1 and node 1: 0, 1 # and this assignment to gpu 3 would fail. 
On a single node with 4 processes # and 4 gpus the rank and gpu ids will align and this will work else: torch.cuda.set_device(local_rank) device = torch.device("cuda", local_rank) torch.distributed.init_process_group(backend='nccl', init_method='env://') return device, local_rank class AttentionReduction(nn.Module): """ This is a reduction that is given Q, K, V and a mask vector. Different from base reductions, which get an embedding stack """ def __init__(self): super().__init__() def forward(self, qkvm: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor: """Inputs are the same as for a normal attention function, but the output here is a single tensor, ``[B, H]`` :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param value: a set of values from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: sentence-level encoding with dim [B, d_model] """ class SingleHeadReduction(AttentionReduction): """ Implementation of the "self_attention_head" layer from the conveRT paper (https://arxiv.org/pdf/1911.03688.pdf) """ def __init__( self, d_model: int, dropout: float = 0.0, scale: bool = False, d_k: Optional[int] = None, pooling: str = 'sqrt_length', ): """ :param d_model: The model hidden size :param dropout (``float``): The amount of dropout to use :param scale: should we scale the dot product attention :param d_k: The low-order project per head. 
This is normally `d_model // num_heads` unless set explicitly
        """
        super().__init__()
        self.output_dim = d_model
        if d_k is None:
            self.d_k = d_model
        else:
            self.d_k = d_k
        # Single-head (num_heads=1) projections for query and key only; values stay at d_model
        self.w_Q = Dense(d_model, self.d_k)
        self.w_K = Dense(d_model, self.d_k)
        if scale:
            self.attn_fn = SeqScaledDotProductAttention(dropout)
        else:
            self.attn_fn = SeqDotProductAttention(dropout)
        # Holds the attention map computed by the most recent forward() call
        self.attn = None
        pooling = pooling.lower()
        # Fill value used on masked timesteps before pooling; -1e9 for max pooling so
        # that padding can never win the max
        self.fill = 0
        if pooling == 'max':
            self.pool = self._max_pool
            self.fill = -1e9
        elif pooling == 'mean':
            self.pool = self._mean_pool
        else:
            self.pool = self._sqrt_length_pool

    def _sqrt_length_pool(self, x, seq_lengths):
        # Sum over the time axis, then scale by sqrt(sequence length)
        x = x.sum(dim=1)  # [B, D]
        x = x * seq_lengths.float().sqrt().unsqueeze(-1)
        return x

    def _mean_pool(self, x, seq_lengths):
        # Mean over the time axis using the true (unpadded) lengths as the divisor
        return torch.sum(x, 1, keepdim=False) / torch.unsqueeze(seq_lengths, -1).to(x.dtype).to(x.device)

    def _max_pool(self, x, _):
        # Max over the time axis; lengths unused since masked steps were filled with -1e9
        x, _ = torch.max(x, 1, keepdim=False)
        return x

    def forward(self, qkvm: Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]) -> torch.Tensor:
        """According to conveRT model's graph, they project token encodings to lower-dimensional query and key in
        single head, use them to calculate the attention score matrix that has dim [B, T, T], then sum over the query
        dim to get a tensor with [B, 1, T] (meaning the amount of attentions each token gets from all other tokens),
        scale it by sqrt of sequence lengths, then use it as the weight to weighted sum the token encoding to get the
        sentence encoding. we implement it in an equivalent way that can best make use of the eight_mile codes: do the
        matrix multiply with value first, then sum over the query dimension.

        :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D
        :param key: a set of keys from encoder or self
        :param value: a set of values from encoder or self
        :param mask: masking (for destination) to prevent seeing what we shouldnt
        :return: sentence-level encoding with dim [B, d_model]
        """
        query, key, value, mask = qkvm
        batchsz = query.size(0)
        seq_mask = mask.squeeze(1).squeeze(1)  # [B, T]
        seq_lengths = seq_mask.sum(dim=1)

        # (B, H, T, D), still have num_heads = 1 to use the attention function defined in eight_miles
        query = self.w_Q(query).view(batchsz, -1, 1, self.d_k).transpose(1, 2)
        key = self.w_K(key).view(batchsz, -1, 1, self.d_k).transpose(1, 2)
        value = value.view(batchsz, -1, 1, self.output_dim).transpose(1, 2)
        x = self.attn_fn((query, key, value, mask))  # [B, 1, T, D]
        self.attn = self.attn_fn.attn

        x = x.squeeze(1)  # [B, T, D]
        # Blank out padded timesteps with the pooling-appropriate fill before reducing over time
        x = x.masked_fill(seq_mask.unsqueeze(-1) == MASK_FALSE, self.fill)
        return self.pool(x, seq_lengths)


class TransformerDiscriminator(nn.Module):
    """A Transformer model that tries to predict if each token is real or fake

    This model is based on [ELECTRA: Pre-Training Text Encoders as Discriminators Rather Than Generators,
    Clark et al. 2019](https://openreview.net/pdf?id=r1xMH1BtvB).
""" def __init__( self, embeddings, num_heads: int, d_model: int, dropout: bool, layers: int = 1, activation: str = "relu", d_ff: Optional[int] = None, d_k: Optional[int] = None, rpr_k: Optional[Union[int, List[int]]] = None, layer_norms_after: bool = False, layer_norm_eps: float = 1.0e-6, embeddings_reduction: str = 'sum', **kwargs, ): super().__init__() self.embeddings = EmbeddingsStack(embeddings, dropout, reduction=embeddings_reduction) self.weight_std = kwargs.get('weight_std', 0.02) assert self.embeddings.dsz == d_model self.transformer = TransformerEncoderStack( num_heads, d_model=d_model, pdrop=dropout, scale=True, layers=layers, activation=activation, d_ff=d_ff, rpr_k=rpr_k, d_k=d_k, layer_norms_after=layer_norms_after, layer_norm_eps=layer_norm_eps ) self.proj_to_output = pytorch_linear(d_model, 1) self.apply(self.init_layer_weights) self.lengths_feature = kwargs.get('lengths_feature', list(self.embeddings.keys())[0]) def init_layer_weights(self, module): if isinstance(module, (nn.Linear, nn.Embedding, nn.LayerNorm)): module.weight.data.normal_(mean=0.0, std=self.weight_std) if isinstance(module, (nn.Linear, nn.LayerNorm)) and module.bias is not None: module.bias.data.zero_() def forward(self, features): embedded = self.embeddings(features) x = features[self.lengths_feature] input_mask = torch.zeros(x.shape, device=x.device, dtype=torch.long).masked_fill(x != Offsets.PAD, 1).unsqueeze(1).unsqueeze(1) transformer_out = self.transformer((embedded, input_mask)) binary = self.proj_to_output(transformer_out) return torch.sigmoid(binary) def create_loss(self): return nn.BCELoss(reduction="none") class PooledSequenceCriterion(nn.Module): def __init__(self, LossFn=nn.BCEWithLogitsLoss, avg='token'): super().__init__() if avg == 'token': self.crit = LossFn() self._norm = self._no_norm else: self.crit = LossFn() self._norm = self._batch_norm def _batch_norm(self, loss, inputs): return loss / inputs.size()[0] def _no_norm(self, loss, inputs): return loss def 
forward(self, inputs, targets): """Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, C] The scores from the model. Batch First :param targets: torch.LongTensor, The labels. :returns: torch.FloatTensor, The loss. """ #inputs = inputs.transpose(0, 1) C = inputs.shape[-1] flat_targets = torch.nn.functional.one_hot(targets, C) # Get the offsets of the non-zero targets, the values of these are all on flat_targets = (torch.sum(flat_targets, axis=1) != 0).float() flat_targets[:, Offsets.PAD] = 0 flat_targets[:, Offsets.EOS] = 0 flat_targets[:, Offsets.GO] = 0 if len(inputs.shape) > 2: max_per_vocab = inputs.max(0)[0] loss = self.crit(max_per_vocab, flat_targets) else: loss = self.crit(inputs, flat_targets) return self._norm(loss, inputs) class SequenceCriterion(nn.Module): def __init__(self, LossFn=nn.NLLLoss, avg='token'): super().__init__() if avg == 'token': # self.crit = LossFn(ignore_index=Offsets.PAD, reduction='elementwise-mean') self.crit = LossFn(ignore_index=Offsets.PAD, size_average=True) self._norm = self._no_norm else: self.crit = LossFn(ignore_index=Offsets.PAD, size_average=False) self._norm = self._batch_norm def _batch_norm(self, loss, inputs): return loss / inputs.size()[0] def _no_norm(self, loss, inputs): return loss def forward(self, inputs, targets): """Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, .., C] The scores from the model. Batch First :param targets: torch.LongTensor, The labels. :returns: torch.FloatTensor, The loss. 
""" total_sz = targets.nelement() loss = self.crit(inputs.view(total_sz, -1), targets.view(total_sz)) return self._norm(loss, inputs) def pytorch_conv1d(in_channels, out_channels, fsz, unif=0, padding=0, initializer=None, stride=1, bias=True, groups=1): c = nn.Conv1d(in_channels, out_channels, fsz, padding=padding, stride=stride, bias=bias, groups=groups) if unif > 0: c.weight.data.uniform_(-unif, unif) elif initializer == "ortho": nn.init.orthogonal_(c.weight) if bias: nn.init.constant_(c.bias, 0) elif initializer == "he" or initializer == "kaiming": nn.init.kaiming_uniform_(c.weight) if bias: nn.init.constant_(c.bias, 0) elif initializer == "normal": nn.init.normal(mean=0, std=unif) if bias: nn.init.constant_(c.bias, 0) else: nn.init.xavier_uniform_(c.weight) if bias: nn.init.constant_(c.bias, 0) return c def tie_weight(to_layer, from_layer): """Assigns a weight object to the layer weights. This method exists to duplicate baseline functionality across packages. :param to_layer: the pytorch layer to assign weights to :param from_layer: pytorch layer to retrieve weights from """ to_layer.weight = from_layer.weight class BilinearAttention(nn.Module): def __init__(self, in_hsz: int, out_hsz: int = 1, bias_x: bool = True, bias_y: bool = True): super().__init__() self.in_hsz = in_hsz self.out_hsz = out_hsz self.bias_x = bias_x self.bias_y = bias_y a1 = in_hsz a2 = in_hsz if self.bias_x: a1 += 1 if self.bias_y: a2 += 1 self.weight = nn.Parameter(torch.Tensor(out_hsz, in_hsz + bias_x, in_hsz + bias_y)) self.reset_parameters() def reset_parameters(self): nn.init.zeros_(self.weight) #nn.init.orthogonal_(self.weight) def forward(self, x, y, mask): r""" Args: x: ``[B, T, H]``. y: ``[B, T, H]``. Returns: ~torch.Tensor: A scoring tensor of shape ``[batch_size, n_out, seq_len, seq_len]``. If ``n_out=1``, the dimension for ``n_out`` will be squeezed automatically. 
""" if self.bias_x is True: ones = torch.ones(x.shape[:-1] + (1,), device=x.device) x = torch.cat([x, ones], -1) if self.bias_y is True: ones = torch.ones(x.shape[:-1] + (1,), device=y.device) y = torch.cat([y, ones], -1) x = x.unsqueeze(1) y = y.unsqueeze(1) u = x @ self.weight s = u @ y.transpose(-2, -1) if self.out_hsz == 1: s = s.squeeze(1) s = s.masked_fill((mask.bool() == MASK_FALSE).unsqueeze(1), -1e9) return s class TripletLoss(nn.Module): """Provide a Triplet Loss using the reversed batch for negatives""" def __init__(self, model): super().__init__() self.score = nn.CosineSimilarity(dim=1) self.model = model def forward(self, inputs, targets): # reverse the batch and use as a negative example neg = targets.flip(0) query = self.model.encode_query(inputs) response = self.model.encode_response(targets) neg_response = self.model.encode_response(neg) pos_score = self.score(query, response) neg_score = self.score(query, neg_response) score = neg_score - pos_score score = score.masked_fill(score < 0.0, 0.0).sum(0) return score class ContrastiveLoss(nn.Module): def __init__(self, model, t=1.0, train_temperature=True): super().__init__() self.model = model if t is None: t = 1.0 self.t = nn.Parameter(torch.tensor(t).float(), requires_grad=train_temperature) def forward(self, inputs, targets): query = self.model.encode_query(inputs) # [B, H] response = self.model.encode_response(targets) # [B, H] query = F.normalize(query, p=2, dim=1) response = F.normalize(response, p=2, dim=1) labels = torch.arange(query.shape[0], device=query.device) logits = torch.mm(query, response.T) * self.t.exp() loss = F.cross_entropy(logits, labels) return loss class SymmetricContrastiveLoss(nn.Module): def __init__(self, model, t=1.0, train_temperature=True): super().__init__() self.model = model if t is None: t = 1.0 self.t = nn.Parameter(torch.tensor(t).float(), requires_grad=train_temperature) def forward(self, inputs, targets): query = self.model.encode_query(inputs) # [B, H] response 
= self.model.encode_response(targets) # [B, H] query = F.normalize(query, p=2, dim=1) response = F.normalize(response, p=2, dim=1) labels = torch.arange(query.shape[0], device=query.device) logits = torch.mm(query, response.T) * self.t.exp() loss_1 = F.cross_entropy(logits, labels) loss_2 = F.cross_entropy(logits.T, labels) loss = (loss_1 + loss_2) * 0.5 return loss class AllLoss(nn.Module): def __init__(self, model, warmup_steps=10000, reduction_type='sum'): r"""Loss from here https://arxiv.org/pdf/1705.00652.pdf see section 4 We want to minimize the negative log prob of y given x -log P(y|x) P(y|x) P(x) = P(x, y) Chain Rule of Probability P(y|x) = P(x, y) / P(x) Algebra P(y|x) = P(x, y) / \sum_\hat(y) P(x, y = \hat(y)) Marginalize over all possible ys to get the probability of x P_approx(y|x) = P(x, y) / \sum_i^k P(x, y_k) Approximate the Marginalization by just using the ys in the batch S(x, y) is the score (cosine similarity between x and y in this case) from our neural network P(x, y) = e^S(x, y) P(y|x) = e^S(x, y) / \sum_i^k e^S(x, y_k) log P(y|x) = log( e^S(x, y) / \sum_i^k e^S(x, y_k)) log P(y|x) = S(x, y) - log \sum_i^k e^S(x, y_k) -log P(y|x) = -(S(x, y) - log \sum_i^k e^S(x, y_k)) """ super().__init__() self.score = nn.CosineSimilarity(dim=-1) self.model = model self.max_scale = math.sqrt(self.model.embeddings.output_dim) self.steps = 0 self.warmup_steps = warmup_steps self.reduction = torch.mean if reduction_type == 'mean' else torch.sum def forward(self, inputs, targets): # This is the cosine distance annealing referred to in https://arxiv.org/pdf/1911.03688.pdf fract = min(self.steps / self.warmup_steps, 1) c = (self.max_scale-1) * fract + 1 self.steps += 1 # These will get broadcast to [B, B, H] query = self.model.encode_query(inputs).unsqueeze(1) # [B, 1, H] response = self.model.encode_response(targets).unsqueeze(0) # [1, B, H] # all_scores is now a batch x batch matrix where index (i, j) is the score between # the i^th x vector and the j^th y 
vector all_score = c * self.score(query, response) # [B, B] # The diagonal has the scores of correct pair, (i, i) pos_score = torch.diag(all_score) # vec_log_sum_exp will calculate the batched log_sum_exp in a numerically stable way # the result is a [B, 1] vector which we squeeze to make it [B] to match the diag # Because we are minimizing the negative log we turned the division into a subtraction here loss = pos_score - vec_log_sum_exp(all_score, -1).squeeze() # Batch loss loss = self.reduction(loss) # minimize the negative loss return -loss class CosineSimilarityLoss(nn.Module): def __init__(self, neg_value=0.3, pos_value=0.8): super().__init__() self.pos_value = pos_value self.neg_value = neg_value def forward(self, embeddings_reduction, labels): hsz = int(embeddings_reduction.shape[-1]//2) label_values = torch.zeros_like(labels, dtype=torch.float) label_values[labels == 0] = self.neg_value label_values[labels == 1] = self.pos_value output = torch.cosine_similarity(embeddings_reduction[:,:hsz], embeddings_reduction[:,hsz:]) loss = F.mse_loss(output, label_values.view(-1), reduction='mean') return loss class OnlineContrastiveLoss(nn.Module): def __init__(self): super().__init__() def forward(self, embeddings_reduction, labels): hsz = int(embeddings_reduction.shape[-1]//2) x = embeddings_reduction[:,:hsz] y = embeddings_reduction[:,hsz:] distance_matrix = 1-F.cosine_similarity(x, y) negs = distance_matrix[labels == 0] poss = distance_matrix[labels == 1] # select hard positive and hard negative pairs negative_pairs = negs[negs < (poss.max() if len(poss) > 1 else negs.mean())] positive_pairs = poss[poss > (negs.min() if len(negs) > 1 else poss.mean())] positive_loss = positive_pairs.pow(2).sum() negative_loss = F.relu(0.5 - negative_pairs).pow(2).sum() loss = positive_loss + negative_loss return loss class TwoHeadConcat(AttentionReduction): """Use two parallel SingleHeadReduction, and concatenate the outputs. 
It is used in the conveRT paper (https://arxiv.org/pdf/1911.03688.pdf)"""

    def __init__(self, d_model, dropout, scale=False, d_k=None, pooling='sqrt_length'):
        """Two parallel 1-head self-attention, then concatenate the output
        :param d_model: dim of the self-attention
        :param dropout: dropout of the self-attention
        :param scale: scale fo the self-attention
        :param d_k: d_k of the self-attention
        :return: concatenation of the two 1-head attention
        """
        super().__init__()
        # Output is the two d_model-sized reductions concatenated together
        self.output_dim = 2*d_model
        self.reduction1 = SingleHeadReduction(d_model, dropout, scale=scale, d_k=d_k, pooling=pooling)
        self.reduction2 = SingleHeadReduction(d_model, dropout, scale=scale, d_k=d_k, pooling=pooling)

    def forward(self, inputs: torch.Tensor):
        # Both reductions see the same (q, k, v, mask) tuple; concatenate their pooled outputs
        x = inputs
        encoding1 = self.reduction1(x)
        encoding2 = self.reduction2(x)
        x = torch.cat([encoding1, encoding2], dim=-1)
        return x


class ConveRTFFN(nn.Module):
    """Implementation of the FFN layer from the convert paper (https://arxiv.org/pdf/1911.03688.pdf)"""

    def __init__(self, insz, hszs, outsz, pdrop):
        """
        :param insz: input dim
        :param hszs: list of hidden sizes
        :param outsz: output dim
        :param pdrop: dropout of each hidden layer
        """
        super().__init__()
        self.dense_stack = DenseStack(insz,
                                      hszs,
                                      activation='gelu',
                                      pdrop_value=pdrop,
                                      skip_connect=True,
                                      layer_norm=True)
        self.final = Dense(hszs[-1], outsz)
        # Projection for the residual path when input and output sizes differ
        self.proj = Dense(insz, outsz) if insz != outsz else nn.Identity()
        self.ln1 = nn.LayerNorm(insz, eps=1e-6)
        self.ln2 = nn.LayerNorm(outsz, eps=1e-6)

    def forward(self, inputs):
        # Pre-norm -> dense stack -> final projection, with a residual from the inputs
        x = self.ln1(inputs)
        x = self.dense_stack(x)
        x = self.final(x)
        x = x + self.proj(inputs)
        return self.ln2(x)


class DualEncoderModel(nn.Module):

    """Abstract base for dual encoders

    We can assume that our dual encoder needs to end up in the same output plane between the encoders, and we can
    define the set of losses here that we are likely to need for most.
    """
    def __init__(self,
                 in_sz: int,
                 stacking_layers: Union[int, List[int]] = None,
                 d_out: int = 512,
                 ffn_pdrop=0.1,
                 in_sz_2=None,
                 output_layer=False,
                 output_activation='tanh',
                 output_shared=False):
        """
        :param in_sz: the output size of the first (query) encoder
        :param stacking_layers: optional hidden sizes for a ConveRT-style FFN head on each side
        :param d_out: the shared output plane size
        :param ffn_pdrop: dropout used inside the FFN heads
        :param in_sz_2: the output size of the second (response) encoder; defaults to `in_sz`
        :param output_layer: force a Dense output head even when sizes already match
        :param output_activation: activation for the Dense output head
        :param output_shared: share one Dense head across both sides when the sizes allow it
        """
        super().__init__()
        if not in_sz_2:
            in_sz_2 = in_sz
        if stacking_layers:
            stacking_layers = listify(stacking_layers)
        if stacking_layers:
            # FFN heads on both sides project each encoder into the shared plane
            self.ff1 = ConveRTFFN(in_sz, stacking_layers, d_out, ffn_pdrop)
            self.ff2 = ConveRTFFN(in_sz_2, stacking_layers, d_out, ffn_pdrop)
        elif output_layer or in_sz != d_out or in_sz != in_sz_2:
            activation = output_activation if output_layer else None
            self.ff1 = Dense(in_sz, d_out, activation=activation)
            if in_sz == in_sz_2 and output_shared:
                self.ff2 = self.ff1
            else:
                self.ff2 = Dense(in_sz_2, d_out, activation=activation)
        else:
            # Sizes already line up; no projection needed
            self.ff1 = nn.Identity()
            self.ff2 = nn.Identity()
        self.output_dim = d_out

    def encode_query_base(self, query: torch.Tensor) -> torch.Tensor:
        """Encode the query side; subclasses must override."""
        pass

    def encode_response_base(self, response: torch.Tensor) -> torch.Tensor:
        """Encode the response side; subclasses must override."""
        pass

    def encode_query(self, query: torch.Tensor) -> torch.Tensor:
        tensor = self.encode_query_base(query)
        return self.ff1(tensor)

    def encode_response(self, response: torch.Tensor) -> torch.Tensor:
        tensor = self.encode_response_base(response)
        return self.ff2(tensor)

    def forward(self, query, response):
        encoded_query = self.encode_query(query)
        encoded_response = self.encode_response(response)
        return encoded_query, encoded_response

    def create_loss(self, loss_type='symmetric', init_temp=None, learn_temp=False):
        """Build one of the dual-encoder losses defined above; falls back to TripletLoss."""
        if loss_type == 'all':
            return AllLoss(self)
        elif loss_type == 'all_mean':
            return AllLoss(self, reduction_type='mean')
        elif loss_type == 'contrastive':
            return ContrastiveLoss(self, init_temp, learn_temp)
        elif loss_type == 'symmetric':
            return SymmetricContrastiveLoss(self, init_temp, learn_temp)
        return TripletLoss(self)


class BasicDualEncoderModel(DualEncoderModel):
    """A simple encoder where the encoders are injected and supply the `encode_query_base` and `encode_response_base`
    """
    def __init__(self, encoder_1: nn.Module, encoder_2: nn.Module, stacking_layers: Union[int, List[int]] = None, d_out: int = 512, ffn_pdrop=0.1):
        super().__init__(encoder_1.output_dim, stacking_layers, d_out, ffn_pdrop, in_sz_2=encoder_2.output_dim)
        self.encoder_1 = encoder_1
        self.encoder_2 = encoder_2

    def encode_query_base(self, query: torch.Tensor) -> torch.Tensor:
        return self.encoder_1(query)

    def encode_response_base(self, response: torch.Tensor) -> torch.Tensor:
        return self.encoder_2(response)


class PairedModel(DualEncoderModel):
    """Legacy model for transformer-based dual encoder

    This is a dual-encoder transformer model which shares the lower layer encoder transformer sub-graph
    The reduction layer is attention based and takes the same input as the transformer layers.  It pools the reprs
    Finally, the feed-forward stacks are applied via subclassing.

    Note that this model predates the more abstract `AbstractDualEncoder` which could accomplish the same thing
    by injecting the same `nn.Module` for encoder_1 and encoder_2 consisting of the transformer and reduction
    """
    def __init__(self, embeddings,
                 d_model: int,
                 d_ff: int,
                 dropout: float,
                 num_heads: int,
                 num_layers: int,
                 stacking_layers: Optional[nn.Module] = None,
                 d_out: Optional[int] = None,
                 d_k: Optional[int] = None,
                 weight_std: float = 0.02,
                 rpr_k: Optional[int] = None,
                 reduction_d_k: int = 64,
                 ffn_pdrop: float = 0.1,
                 windowed_ra: bool = False,
                 rpr_value_on: bool = False,
                 reduction_type: str = "2ha",
                 freeze_encoders: bool = False,
                 layer_norms_after: bool = False,
                 embeddings_reduction: str = 'sum',
                 layer_norm_eps: float=1e-6,
                 output_layer: bool = False,
                 output_activation: str = 'tanh',
                 output_shared: bool = False,
                 transformer_type: Optional[str]=None,
                 **kwargs):
        # The "2*" reductions concatenate two heads, doubling the reduction output size
        super().__init__(2*d_model if reduction_type.startswith("2") else d_model,
                         stacking_layers,
                         d_out if d_out is not None else d_model,
                         ffn_pdrop,
                         None,
                         output_layer,
                         output_activation,
                         output_shared)

        reduction_type = reduction_type.lower()
        self.reduce_fn = self._reduce_3
        if reduction_type == "2ha":
            self.reduction_layer = TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k)
        elif reduction_type == "2ha_mean":
            self.reduction_layer = TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="mean")
        elif reduction_type == "2ha_max":
            self.reduction_layer = TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="max")
        elif reduction_type == "sha":
            self.reduction_layer = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k)
        elif reduction_type == "sha_mean":
            self.reduction_layer = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="mean")
        elif reduction_type == "sha_max":
            self.reduction_layer = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="max")
        elif reduction_type == 'max':
            self.reduce_fn = self._reduce_1
            self.reduction_layer = MaxPool1D(self.output_dim)
        elif reduction_type == 'mean':
            self.reduce_fn = self._reduce_1
            self.reduction_layer = MeanPool1D(self.output_dim)
        elif reduction_type == 'cls' or reduction_type == 'zero':
            self.reduce_fn = self._reduce_0
        else:
            # NOTE(review): the message says "exception type" but this actually reports
            # an unrecognized `reduction_type` value
            raise Exception("Unknown exception type")
        self.weight_std = weight_std
        ra_type = kwargs.get('ra_type')
        self.transformer = TransformerEncoderStack(num_heads=num_heads, d_model=d_model,
                                                   pdrop=dropout, layers=num_layers, activation='gelu', d_ff=d_ff,
                                                   ffn_pdrop=ffn_pdrop,
                                                   d_k=d_k, rpr_k=rpr_k, windowed_ra=windowed_ra,
                                                   rpr_value_on=rpr_value_on,
                                                   layer_norms_after=layer_norms_after,
                                                   layer_norm_eps=layer_norm_eps,
                                                   ra_type=ra_type, transformer_type=transformer_type)
        self.embeddings = EmbeddingsStack({'x': embeddings}, 0.0, False, embeddings_reduction)
        self.freeze = freeze_encoders
        self.apply(self.init_layer_weights)

    def init_layer_weights(self, module):
        # Normal init for weights, zeros for biases
        if isinstance(module, (nn.Linear, nn.Embedding, nn.LayerNorm)):
            module.weight.data.normal_(mean=0.0, std=self.weight_std)
        if isinstance(module, (nn.Linear, nn.LayerNorm)) and module.bias is not None:
            module.bias.data.zero_()

    def _reduce_3(self, encoded, att_mask):
        """The attention modules originally created for DE have 3 (redundant) inputs, so use all 3 here
        """
        return self.reduction_layer((encoded, encoded, encoded, att_mask))

    def _reduce_1(self, encoded, att_mask):
        """The standard reduction modules use an input and a length
        """
        lengths = att_mask.squeeze(1).squeeze(1).sum(-1)
        return self.reduction_layer((encoded, lengths))

    def _reduce_0(self, encoded, _):
        """The [CLS] or <s> reduction on the first token just needs the first timestep
        """
        return encoded[:, 0]

    def encode_query_base(self, query):
        query_mask = (query != Offsets.PAD)
        att_mask = query_mask.unsqueeze(1).unsqueeze(1)

        # When the encoders are frozen, run embedding + transformer without gradients;
        # ExitStack() acts as a null context otherwise.
        # NOTE(review): the reduction is applied outside this context so it can still
        # train when the encoders are frozen — confirm against the upstream source.
        with torch.no_grad() if self.freeze else contextlib.ExitStack():
            embedded = self.embeddings({'x': query})
            encoded_query = self.transformer((embedded, att_mask))

        encoded_query = self.reduce_fn(encoded_query, att_mask)
        return encoded_query

    def encode_response_base(self, response):
        response_mask = (response != Offsets.PAD)
        att_mask = response_mask.unsqueeze(1).unsqueeze(1)
        # Same freeze/no-grad handling as encode_query_base
        with torch.no_grad() if self.freeze else contextlib.ExitStack():
            embedded = self.embeddings({'x': response})
            encoded_response = self.transformer((embedded, att_mask))

        encoded_response = self.reduce_fn(encoded_response, att_mask)
        return encoded_response


class TransformerBoWPairedModel(DualEncoderModel):
    """2 Encoders (E1, E2).  E1 is a Transformer followed by attention reduction.
    E2 is just a pooling of embeddings
    """
    def __init__(self, embeddings,
                 d_model,
                 d_ff,
                 dropout,
                 num_heads,
                 num_layers,
                 stacking_layers=None,
                 d_out=512,
                 d_k=None,
                 weight_std=0.02,
                 rpr_k=None,
                 reduction_d_k=64,
                 ffn_pdrop=0.1,
                 windowed_ra=False,
                 rpr_value_on=False,
                 reduction_type_1="2ha",
                 freeze_encoders=False,
                 layer_norms_after=False,
                 transformer_type: Optional[str]=None,
                 **kwargs):
        super().__init__(d_model, stacking_layers, d_out, ffn_pdrop)

        reduction_type_1 = reduction_type_1.lower()

        # E1 reduction: the two-head variants are projected back down to d_model
        if reduction_type_1 == "2ha":
            self.reduction_layer_1 = nn.Sequential(TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k),
                                                   nn.Linear(2*d_model, d_model))
        elif reduction_type_1 == "2ha_mean":
            self.reduction_layer_1 = nn.Sequential(TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="mean"),
                                                   nn.Linear(2 * d_model, d_model))
        elif reduction_type_1 == "2ha_max":
            self.reduction_layer_1 = nn.Sequential(TwoHeadConcat(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="max"),
                                                   nn.Linear(2 * d_model, d_model))
        elif reduction_type_1 == "sha":
            self.reduction_layer_1 = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k)
        elif reduction_type_1 == "sha_mean":
            self.reduction_layer_1 = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="mean")
        elif reduction_type_1 == "sha_max":
            self.reduction_layer_1 = SingleHeadReduction(d_model, dropout, scale=False, d_k=reduction_d_k, pooling="max")
        else:
            # NOTE(review): the message says "exception type" but this actually reports
            # an unrecognized `reduction_type_1` value
            raise Exception("Unknown exception type")

        self.weight_std = weight_std
        ra_type = kwargs.get('ra_type')
        self.transformer = TransformerEncoderStack(num_heads=num_heads, d_model=d_model,
                                                   pdrop=dropout, layers=num_layers, activation='gelu', d_ff=d_ff,
                                                   ffn_pdrop=ffn_pdrop,
                                                   d_k=d_k, rpr_k=rpr_k, windowed_ra=windowed_ra,
                                                   rpr_value_on=rpr_value_on,
                                                   layer_norms_after=layer_norms_after,
                                                   ra_type=ra_type, transformer_type=transformer_type)
        self.embeddings = EmbeddingsStack({'x': embeddings})
        self.freeze = freeze_encoders
        # E2 reduction: plain pooling over embeddings, matched to the E1 pooling flavor
        self.reduction_layer_2 = MaxPool1D(d_out) if reduction_type_1.endswith('max') else MeanPool1D(d_out)
        self.apply(self.init_layer_weights)

    def init_layer_weights(self, module):
        # Normal init for weights, zeros for biases
        if isinstance(module, (nn.Linear, nn.Embedding, nn.LayerNorm)):
            module.weight.data.normal_(mean=0.0, std=self.weight_std)
        if isinstance(module, (nn.Linear, nn.LayerNorm)) and module.bias is not None:
            module.bias.data.zero_()

    def encode_query_base(self, query):
        query_mask = (query != Offsets.PAD)
        att_mask = query_mask.unsqueeze(1).unsqueeze(1)
        # When the encoders are frozen, run embedding + transformer without gradients;
        # ExitStack() acts as a null context otherwise
        with torch.no_grad() if self.freeze else contextlib.ExitStack():
            embedded = self.embeddings({'x': query})
            encoded_query = self.transformer((embedded, att_mask))

        encoded_query = self.reduction_layer_1((encoded_query, encoded_query, encoded_query, att_mask))
        return encoded_query

    def encode_response_base(self, response):
        response_lengths = torch.sum(response != Offsets.PAD, dim=1)
        with torch.no_grad() if self.freeze else contextlib.ExitStack():
            embedded = self.embeddings({'x': response})

        # Bag-of-words side: just pool the embeddings with the true lengths
        encoded_response = self.reduction_layer_2((embedded, response_lengths))
        return encoded_response


class CudaTimer:
    """A CUDA timer context manager that can be used to track and record events

    The timer is only enabled if `MEAD_PYTORCH_TIMER` is true.
    If its enabled, it will cause a large slowdown (similar to `CUDA_LAUNCH_BLOCKING`).
""" def __init__(self, name, sync_before=True): """ :param name: :param sync_before: """ self.enabled = str2bool(os.getenv('MEAD_PYTORCH_TIMER', False)) if self.enabled: self._name = name self._start = torch.cuda.Event(enable_timing=True) self._end = torch.cuda.Event(enable_timing=True) if sync_before: torch.cuda.synchronize() def __enter__(self): if self.enabled: self._start.record() def __exit__(self, exc_type, exc_value, exc_traceback): if self.enabled: self._end.record() torch.cuda.synchronize() elapsed = self._start.elapsed_time(self._end) print(f"({os.getpid()}) {self._name} {elapsed}") class WeightedNLLLoss(nn.Module): """Weight individual training examples """ def __init__(self): super().__init__() self.loss = nn.NLLLoss(reduction='none') def forward(self, pred, y, weight): loss = self.loss(pred, y) weight = weight.type_as(loss) return torch.dot(loss, weight)/len(weight) class WeightedMultiHeadNLLLoss(nn.Module): """Weight individual training examples with multiple heads """ def __init__(self): super().__init__() self.loss = nn.NLLLoss(reduction='none') def forward(self, preds, targets, weights): loss = sum([self.loss(pred, targets[:, i]) for i, pred in enumerate(preds)]) weights = weights.type_as(loss) return torch.dot(loss, weights)/len(weights) class WeightedSequenceLoss(nn.Module): """Weight individual training examples """ def __init__(self, LossFn: nn.Module = nn.NLLLoss, avg: str = "token"): super().__init__() self.avg = avg self.crit = LossFn(ignore_index=Offsets.PAD, reduction="none") if avg == 'token': self._reduce = self._mean else: self._reduce = self._sum def _mean(self, loss): return loss.mean(axis=1) def _sum(self, loss): return loss.sum(axis=1) def forward(self, inputs: torch.Tensor, targets: torch.Tensor, weight: torch.Tensor) -> torch.Tensor: """Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, T, C] The scores from the model. Batch First :param targets: torch.LongTensor, [B, T] The labels. 
:param weight: sample weights [B, ] :returns: torch.FloatTensor, The loss. """ total_sz = targets.nelement() batchsz = weight.shape[0] loss = self.crit(inputs.view(total_sz, -1), targets.view(total_sz)).view(batchsz, -1) # [B, T] loss = torch.dot(self._reduce(loss), weight.type_as(loss)) / batchsz return loss def extra_repr(self): return f"reduction={self.avg}"
en
0.792782
Generate a sequence mask of shape `BxT` based on the given lengths :param lengths: A `B` tensor containing the lengths of each example :param max_len: The maximum width (length) allowed in this mask (default to None) :return: A mask # 1 x T # B x 1 # Broadcast to B x T, compares increasing number to max Generate a sequence mask of shape `BxT` based on the given lengths, with a maximum value This function primarily exists to make ONNX tracing work better :param lengths: A `B` tensor containing the lengths of each example :param max_len: The maximum width (length) allowed in this mask (default to None) :return: A mask # 1 x T # B x 1 # Broadcast to B x T, compares increasing number to max #mask = mask.narrow(1, 0, arcs_h.shape[1]) Vectorized version of log-sum-exp :param vec: Vector :param dim: What dimension to operate on :return: Undo the sort on a batch of tensors done for packing the data in the RNN. :param batch: The batch of data batch first `[B, ...]` :param perm_idx: The permutation index returned from the torch.sort. :returns: The batch in the original order. # Add ones to the shape of the perm_idx until it can broadcast to the batch Infer the lengths of an input based on the idea the Offsets.PAD was used as the padding token. :param tensor: The data to infer the length of, should be either [B, T] or [T, B] :param dim: The dimension which contains the sequential signal :returns: A Tensor of shape `[B]` that has the lengths for example item in the batch Return either the unpacked inputs (2), or a `Tuple` of the input with None TODO: this function should probably be changed to always return the lengths second. To do this, we just need a sentinel value, e.g. <PAD> (0). The problem with doing this is that it might be possible to generate <PAD> in the middle of the tensor which would make that length invalid. 
:param inputs: Either a sequence of the `(tensor, length)` or just the `tensor` :return: A `Tuple` of `(tensor, length)` or `(tensor, None)` Inverted dropout that applies the same mask at each time step. Variational Dropout :param pdrop: the percentage to drop # Create a mask that covers a single time step # Broadcast the mask over the sequence Computes the loss over a sequence A class that applies a Loss function to sequence via the folding trick. :param LossFn: A loss function to apply (defaults to `nn.NLLLoss`) :param avg: A divisor to apply, valid values are `token` and `batch` Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, .., C] The scores from the model. Batch First :param targets: torch.LongTensor, The labels. :returns: torch.FloatTensor, The loss. Use Label smoothing from `Szegedy et. al., 2015`_ to temper model confidence. Implements add-gamma smoothing where the probability mass of the gold label distribution is smoothed across classes. This implementation is based on `OpenNMT-py`_ but has been adapted to not require the vocabulary size up front. .. _Szegedy et. al., 2015: https://arxiv.org/abs/1512.00567 .. _OpenNMY-py: https://github.com/OpenNMT/OpenNMT-py/blob/938a4f561b07f4d468647823fab761cfb51f21da/onmt/utils/loss.py#L194 :param output: The model outputs, [B, V] :param target: The target labels, [B] Do a mean pool while accounting for the length of a sequence Set up pooling module :param outsz: The output dim, for dowstream access :param batch_first: Is this module batch first or time first? 
Apply mean pooling on the valid inputs :param inputs: A tuple of `(input, lengths)` :return: Pooled output # Regardless of whether the input is `[B, T, H]` or `[T, B, H]` the shape after # the sum is `[B, H]` so the lengths (of shape `[B]`) should be unsqueezed to # `[B, 1]` in order to broadcast Do a max-pooling operation with or without a length given If we are given a tuple as input, we will use the length, otherwise we will do an operation without masking :param inputs: either a tuple of `(input, lengths)` or a tensor `input` :return: A pooled tensor # If tensor = `[B, T, H]` # mask = `[B, T, 1]` # If tensor = `[T, B, H]` # mask = `[T, B, 1]` # So it will mask all the values in H past the right length # Fill masked with very negative so it never gets selected # Torch only added this module in 1.4.0, shim #Code taken from: https://github.com/huggingface/transformers/blob/766d4bf7920213bdd8a8afb42a72719190124568/src/transformers/activations.py#L27 Implementation of the GELU activation function currently in Google BERT repo (identical to OpenAI GPT). Also see the Gaussian Error Linear Units paper: https://arxiv.org/abs/1606.08415 Get back an `nn.Module` by string name of the activation operator :param name: A string name of the operation :return: A module associated with that string Concat forward and backword state vectors. The shape of the hidden is `[#layers * #dirs, B, H]`. The docs say you can separate directions with `h.view(#l, #dirs, B, H)` with the forward dir being index 0 and backwards dir being 1. This means that before separating with the view the forward dir are the even indices in the first dim while the backwards dirs are the odd ones. Here we select the even and odd values and concatenate them :param h: The hidden shape as it comes back from PyTorch modules Convert the bidirectional out of an RNN so the forward and backward values are a single vector. 
Perform a 1D convolution with output size same as input size To make this operation work as expected, we cannot just use `padding=kernel_size//2` inside of the convolution operation. Instead, we zeropad the input using the `ConstantPad1d` module Create a 1D conv to produce the same output size as input :param in_channels: The number of input feature maps :param out_channels: The number of output feature maps :param kernel_size: The kernel size :param bias: Is bias on? :param groups: Number of conv groups Do convolution1d on an input tensor, `[B, C, T]` :param x: The input tensor of shape `[B, C, T]` :return: The output tensor of shape `[B, H, T]` 1D Convolutional layer encoder with given activation function, optional dropout This module takes in a temporal signal of either shape `[B, C, T]` or `[B, T, C]`, depending on the constructor and produces an output signal of the same orientation (`[B, H, T]` or `[B, T, H]`, respectively). We default to `[B, T, H]` orientation to make it more convenient for typical layout, but this requires transposing the last 2 dims before and after the convolution operation. Construct the encoder with optional dropout, given activation, and orientation :param insz: The number of input feature maps :param outsz: The number of output feature maps (or hidden size) :param filtsz: The kernel size :param pdrop: The amount of dropout to apply, this defaults to 0 :param activation: The activation function by name, defaults to `relu` :param bias: Use bias? :param groups: How many conv groups. Defaults to 1 :param hidden_last: PyTorch only! If `True` the orientatiation is `[B, T, H]`, o.w. `[B, H, T]` expected Create a stack of convolutional encoders with residual connections between, using the `ConvEncoder` underneath This creates an encoder stack of convolutions, finally returning the last temporal output. Each layer uses zero-padding which causes the output of the convolution at each layer to be the same length. 
As in the `ConvEncoder` we support input tensor shapes of `[B, C, T]` or `[B, T, C]` depending on the constructor initialization, and transpose underneath the input and output of the stack if the orientation is defaulted to `[B, T, C]` Construct the encoder stack :param insz: The input number of feature maps :param outsz: The output number of feature maps :param filtsz: The kernel size :param nlayers: The number of layers in the stack (defaults to a single layer) :param pdrop: The amount of dropout to apply (defaults to `0`) :param activation: The activation function to use as a string, defaults to `relu` :param bias: Use bias? :param groups: How many conv groups. Defaults to 1 :param hidden_last: PyTorch only! If `True` the orientatiation is `[B, T, H]`, o.w. `[B, H, T]` expected Apply a stack of 1D convolutions with residual connections between them :param input: A tensor of shape `[B, T, C]` or `[B, C, T]` depending on value of `hidden_last` :return: A tensor of shape `[B, T, H]` or `[B, H, T]` depending on the value of `hidden_last` Transpose the 2nd and 3rd dim of a tensor Utility layer to convert from `[B, T, H]` to `[B, H, T]` Permute the dimensions, first goes to third, second goes to first, last moves to second Utility layer to convert from `[T, B, H]` to `[B, H, T]` Transpose the first 2 dims Utility layer to convert from `[T, B, H]` to `[B, T, H]` Transpose the first 2 dims Utility layer to convert from `[B, T, H]` to `[T, B, H]` Utility layer to convert from `[B, H, T]` to `[B, T, H]` Layer of parallel convolutions with varying filter sizes followed by max over time pooling This module takes an input tensor of any orientation based on its constructor, and pools its output to shape `[B, H]`, where `H` is `outsz * len(filtsz)` Constructor for a parallel convolution from any orientation tensor input :param insz: The number of input feature maps :param outsz: The number of output feature maps :param filtsz: The kernel size as a list of parallel filters to 
apply, e.g. `[3, 4, 5]` :param activation: An activation function by name to apply :param input_fmt: A string for the orientation. Valid values are `bth` or `btc` meaning hidden units last, `bht` or `bct` meaning the temporal dim last or `tbh` or `tbc` meaning the hidden units last and the temporal dim first # Add the module so its managed correctly Transform the input to `[B, C, T]` from any orientation and perform parallel 1D convs and max over time pool :param inputs: An input tensor of any format specified in the constructor :return: A `[B, H]` tensor representing the pooled outputs # In Conv1d, data BxCxT, max over time # self.conv_drop(mots) Highway layer as defined in https://arxiv.org/abs/1505.00387 Highway layer constructor :param input_size: The input hidden size :param kwargs: Take a tensor in and produce the highway layer output :param input: Input tensor :return: output tensor Utility function that wraps a linear (AKA dense) layer creation, with options for weight init and bias A stacked LSTM cells applied at a timestep Apply a stack of LSTMs :param input: The input to the first LSTM `[B, H]` :param hidden: The previous `(h, c)` where `h=(h_0, h_1,..)`, `c=(c_0, c_1,..)` :return: The output and hidden `(h, c)` where `h=(h_0, h_1,..)`, `c=(c_0, c_1,..)` A stacked GRU cells applied at a timestep Apply a stack of GRUs :param input: The input to the first LSTM `[B, H]` :param hidden: The previous `h` where `h=(h_0, h_1,..)` :return: The output and hidden `h` where `h=(h_0, h_1,..)` Dense (Linear) layer with optional activation given This module is the equivalent of the tf.keras.layer.Dense, module with optional activations applied Constructor for "dense" or "linear" layer, with optional activation applied :param insz: The number of hidden units in the input :param outsz: The number of hidden units in the output :param activation: The activation function by name, defaults to `None`, meaning no activation is applied :param unif: An optional initialization 
value which can set the linear weights. If given, biases will init to 0 :param initializer: An initialization scheme by string name: `ortho`, `kaiming` or `he`, `xavier` or `glorot` Run a linear projection over the input, followed by an optional activation given by constructor :param input: the input tensor :return: the transformed output Do weight tying from the input parameter This module never copies the weight pointer, it lazily accesses to allow the tied variable to reset its parameters after initialization. This is helpful for cases where we have LMs and are reloading them after they have been initially created Create a residual block by wrapping an layer with a residual connection Wrap an layer with a residual connection :param layer: This layer will be applied to the input and added to the input :param kwargs: Apply a residual block :param input: A tensor to use as input and to add to output :return: The residual connection output Subclass of ResidualBlock(Dense) with an activation function given Create a `SkipConnection` :param input_size: The input dimension size :param activation: A string activation name This is a wrapper function around a stacked RNN cell :param insz: The input dimensions :param hsz: The hidden dimensions :param rnntype: An RNN type `gru` or `lstm` :param nlayers: The number of layers to stack :param dropout: The amount of dropout :return: Wrapper around `torch.nn.LSTM`, mainly for weight initialization options :param insz: The input dimension :param hsz: The number of hidden units :param rnntype: A string description of the type of LSTM: `bi?lstm` or `lstm` :param nlayers: The number of layers :param dropout: How much dropout to apply :param unif: if uniform initialization, what range? 
:param batch_first: Should we do the RNN batch first or time first :param initializer: An optional string representing a style of initialization `ortho`, `he`/`kaiming`, `xavier`/`glorot` :return: An LSTM # , bias=False) The LSTM encoder is a base for a set of encoders producing various outputs. All LSTM encoders inheriting this class will trim the input to the max length given in the batch. For example, if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will be length `S` (or more precisely, `[B, S, H]`) *PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and this is the PyTorch default. There is an extra parameter in all of these models called `batch_first` which controls this. Currently, the default is time first (`batch_first=False`), which differs from TensorFlow. To match the TF impl, set `batch_first=True`. *PyTorch Note*: Most `LSTMEncoder` variants just define the `forward`. This module cannot provide the same utility as the TensorFlow `LSTMEncoder` base right now, because because the JIT isnt handling subclassing of forward properly. Produce a stack of LSTMs with dropout performed on all but the last layer. :param insz: The size of the input :param hsz: The number of hidden units per LSTM :param nlayers: The number of layers of LSTMs to stack :param pdrop: The probability of dropping a unit value during dropout, defaults to 0 :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`) :param batch_first: PyTorch only! Should we do batch first input or time-first input? Defaults to `False` (differs from TF!) :param unif: PyTorch only! Initialization parameters for RNN :param initializer: PyTorch only! 
A string describing optional initialization type for RNN # def forward(self, inputs: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]: # tbc, lengths = tensor_and_lengths(inputs) # packed = torch.nn.utils.rnn.pack_padded_sequence(tbc, lengths, batch_first=self.batch_first) # output, hidden = self.rnn(packed) # output, _ = torch.nn.utils.rnn.pad_packed_sequence(output, batch_first=self.batch_first) # return self.output_fn(output, hidden) # def output_fn(self, output, state): # return output, self.extract_top_state(state) Get a view of the top state of shape [B, H]` :param state: :return: # Select the topmost state with -1 and the only direction is forward (select with 0) LSTM encoder to produce the transduced output sequence. Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of shape `[B, S, H]` where `S = max(lengths)`. The lengths of the output sequence may differ from the input sequence if the `max(lengths)` given is shorter than `T` during execution. *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`, and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same orientation. 
Take in a tuple of `(sequence, lengths)` and produce and output tensor of the last layer of LSTMs The value `S` here is defined as `max(lengths)`, `S <= T` :param inputs: sequence of shapes `[B, T, C]` or `[T, B, C]` and a lengths of shape `[B]` :return: A tensor of shape `[B, S, H]` or `[S, B, H]` depending on setting of `batch_first` LSTM encoder producing the hidden state and the output, where the input doesnt require any padding PyTorch note: This type of encoder doesnt inherit the `LSTMEncoderWithState` base :param insz: The size of the input :param hsz: The number of hidden units per LSTM :param nlayers: The number of layers of LSTMs to stack :param pdrop: The probability of dropping a unit value during dropout, defaults to 0 :param batch_first: PyTorch only! do batch first or time-first input? Defaults to `False` (differs from TF!) :param unif: PyTorch only! Initialization parameters for RNN :param initializer: PyTorch only! A string describing optional initialization type for RNN :param input_and_prev_h: The input at this timestep and the previous hidden unit or `None` :return: Raw `torch.nn.LSTM` output ##concat_state_dirs(hidden) LSTM encoder that passes along the full output and hidden states for each layer Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence, and a tuple of hidden vector `[L, B, H]` and context vector `[L, B, H]`, respectively *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. 
Also note that in PyTorch, this defaults to `True` :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]` :return: An output tensor `[B, S, H]` or `[B, H, S]` , and tuple of hidden `[L, B, H]` and context `[L, B, H]` LSTM encoder that returns the top hidden state Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and returns a hidden unit tensor of shape `[B, H]` *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. Also note that in PyTorch, this defaults to `True` :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]` :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state # TODO: this module only exists in pytorch. Do we eliminate it or put it in both? BiLSTM encoder base for a set of encoders producing various outputs. All BiLSTM encoders inheriting this class will trim the input to the max length given in the batch. For example, if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will be length `S` (or more precisely, `[B, S, H]`). Because its bidirectional, half of the hidden units given in the constructor will be applied to the forward direction and half to the backward direction, and these will get concatenated. *PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and this is the PyTorch default. There is an extra parameter in all of these models called `batch_first` which controls this. Currently, the default is time first (`batch_first=False`), which differs from TensorFlow. To match the TF impl, set `batch_first=True`. *PyTorch Note*: Most `BiLSTMEncoder` variants just define the `forward`. 
This module cannot provide the same utility as the TensorFlow `BiLSTMEncoder` base right now, because because the JIT isnt handling subclassing of forward properly. Produce a stack of LSTMs with dropout performed on all but the last layer. :param insz: The size of the input :param hsz: The number of hidden units per BiLSTM (`hsz//2` used for each direction and concatenated) :param nlayers: The number of layers of BiLSTMs to stack :param pdrop: The probability of dropping a unit value during dropout, defaults to 0 :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`) :param batch_first: Should we do batch first input or time-first input? Defaults to `False` (differs from TF!) :param unif: PyTorch only! Initialization parameters for RNN :param initializer: PyTorch only! A string describing optional initialization type for RNN # Select the topmost state with -1 and the only direction is forward (select with 0) # TODO: this module only exists in pytorch. Do we eliminate it or put it in both? BiLSTM encoder that passes along the full output and hidden states for each layer Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence, and a tuple of hidden vector `[L, B, H]` and context vector `[L, B, H]`, respectively *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. Also note that in PyTorch, this defaults to `True` :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]` :return: An output tensor `[B, S, H] or `[B, H, S]` , and tuple of hidden `[L, B, H]` and context `[L, B, H]` BiLSTM encoder to produce the transduced output sequence. Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of shape `[B, S, H]` where `S = max(lengths)`. 
The lengths of the output sequence may differ from the input sequence if the `max(lengths)` given is shorter than `T` during execution. *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`, and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same orientation. Take in a tuple of `(sequence, lengths)` and produce and output tensor of the last layer of LSTMs The value `S` here is defined as `max(lengths)`, `S <= T` :param inputs: sequence of shapes `[B, T, C]` or `[T, B, C]` and a lengths of shape `[B]` :return: A tensor of shape `[B, S, H]` or `[S, B, H]` depending on setting of `batch_first` BiLSTM encoder that returns the top hidden state Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and returns a hidden unit tensor of shape `[B, H]` *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. Also note that in PyTorch, this defaults to `True` :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]` :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state # TODO: Add this to TF or remove The GRU encoder is a base for a set of encoders producing various outputs. All GRU encoders inheriting this class will trim the input to the max length given in the batch. For example, if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will be length `S` (or more precisely, `[B, S, H]`) *PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and this is the PyTorch default. There is an extra parameter in all of these models called `batch_first` which controls this. Currently, the default is time first (`batch_first=False`), which differs from TensorFlow. To match the TF impl, set `batch_first=True`. 
*PyTorch Note*: Most `GRUEncoder` variants just define the `forward`. This module cannot provide the same utility as the TensorFlow `GRUEncoder` base right now, because because the JIT isnt handling subclassing of forward properly. Produce a stack of GRUs with dropout performed on all but the last layer. :param insz: The size of the input :param hsz: The number of hidden units per GRU :param nlayers: The number of layers of GRUs to stack :param pdrop: The probability of dropping a unit value during dropout, defaults to 0 :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`) :param batch_first: PyTorch only! Should we do batch first input or time-first input? Defaults to `False` (differs from TF!) :param unif: PyTorch only! Initialization parameters for RNN :param initializer: PyTorch only! A string describing optional initialization type for RNN GRU encoder to produce the transduced output sequence. Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of shape `[B, S, H]` where `S = max(lengths)`. The lengths of the output sequence may differ from the input sequence if the `max(lengths)` given is shorter than `T` during execution. *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`, and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same orientation. 
Take in a tuple of the sequence tensor `[T, B, H]` or `[B, T, H]` and its length, produce output sequence :param inputs: A tuple of the sequence tensor and its length :return: A sequence tensor of shape `[T, B, H]` or `[B, T, H]` GRU encoder that passes along the full output and hidden states for each layer Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence, and a hidden vector `[L, B, H]` *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. Also note that in PyTorch, this defaults to `True` :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]` :return: An output tensor `[B, S, H]` or `[B, H, S]` , and a hidden tensor `[L, B, H]` GRU encoder that returns the top hidden state Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and returns a hidden unit tensor of shape `[B, H]` *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. Also note that in PyTorch, this defaults to `True` :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]` :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state BiGRU encoder base for a set of encoders producing various outputs. All BiGRU encoders inheriting this class will trim the input to the max length given in the batch. For example, if the input sequence is `[B, T, C]` and the `S = max(lengths)` then the resulting sequence, if produced, will be length `S` (or more precisely, `[B, S, H]`). Because its bidirectional, half of the hidden units given in the constructor will be applied to the forward direction and half to the backward direction, and these will get concatenated. 
*PyTorch Note*: In PyTorch, its more common for the input shape to be temporal length first (`[T, B, H]`) and this is the PyTorch default. There is an extra parameter in all of these models called `batch_first` which controls this. Currently, the default is time first (`batch_first=False`), which differs from TensorFlow. To match the TF impl, set `batch_first=True`. *PyTorch Note*: Most `BiGRUEncoder` variants just define the `forward`. This module cannot provide the same utility as the TensorFlow `BiGRUEncoder` base right now, because because the JIT isnt handling subclassing of forward properly. Produce a stack of GRUs with dropout performed on all but the last layer. :param insz: The size of the input :param hsz: The number of hidden units per BiGRU (`hsz//2` used for each direction and concatenated) :param nlayers: The number of layers of BiGRUs to stack :param pdrop: The probability of dropping a unit value during dropout, defaults to 0 :param requires_length: Does this encoder require an input length in its inputs (defaults to `True`) :param batch_first: Should we do batch first input or time-first input? Defaults to `False` (differs from TF!) :param unif: PyTorch only! Initialization parameters for RNN :param initializer: PyTorch only! A string describing optional initialization type for RNN # Select the topmost state with -1 and the only direction is forward (select with 0) # TODO: normalize across backends or remove BiGRU encoder that passes along the full output and hidden states for each layer Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` This returns a 2-tuple of outputs `[B, S, H]` where `S = max(lengths)`, for the output vector sequence, and a hidden vector `[L, B, H]` *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. 
Also note that in PyTorch, this defaults to `True` :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]` :return: An output tensor `[B, S, H] or `[B, H, S]` , and a hidden vector `[L, B, H]` BiGRU encoder to produce the transduced output sequence. Takes a tuple of tensor, shape `[B, T, C]` and a lengths of shape `[B]` and produce an output sequence of shape `[B, S, H]` where `S = max(lengths)`. The lengths of the output sequence may differ from the input sequence if the `max(lengths)` given is shorter than `T` during execution. *PyTorch Note:* The input shape of is either `[B, T, C]` or `[T, B, C]` depending on the value of `batch_first`, and defaults to `[T, B, C]` for consistency with other PyTorch modules. The output shape is of the same orientation. Take in a tuple of `(sequence, lengths)` and produce and output tensor of the last layer of GRUs The value `S` here is defined as `max(lengths)`, `S <= T` :param inputs: sequence of shapes `[B, T, C]` or `[T, B, C]` and a lengths of shape `[B]` :return: A tensor of shape `[B, S, H]` or `[S, B, H]` depending on setting of `batch_first` GRU encoder that returns the top hidden state Takes a tuple containing a tensor input of shape `[B, T, C]` and lengths of shape `[B]` and returns a hidden unit tensor of shape `[B, H]` *PyTorch note*: Takes a vector of shape `[B, T, C]` or `[B, C, T]`, depending on input specification of `batch_first`. 
Also note that in PyTorch, this defaults to `True` :param inputs: A tuple containing the input tensor `[B, T, C]` or `[B, H, C]` and a length `[B]` :return: An output tensor of shape `[B, H]` representing the last RNNs hidden state This reduction assumes paired input and subtracts the two to get a distance It is useful for training sentence encoders and is used, for example, in SentenceBERT For this to work we assume that the inputs are paired, and subtract them # We could actually project if we needed, or at least should validate Takes in a dictionary where the keys are the input tensor names, and the values are the embeddings :param embeddings_dict: dictionary of each feature embedding :param dropout_rate: The dropout rate (0.0 means no dropout, 1.0 means complete) # TODO: should we make a registry of options? This method performs "embedding" of the inputs. The base method here then concatenates along depth dimension to form word embeddings :return: A 3-d vector where the last dimension is the concatenated dimensions of all embeddings # Its a hair faster to do this than using isinstance A stack of one or more hidden layers Stack 1 or more hidden layers, optionally (forming an MLP) :param insz: The number of input units :param hsz: The number of hidden units :param activation: The name of the activation function to use :param pdrop_value: The dropout probability :param init: The initializer :param skip_connect: whether use skip connection when insz is equal to outsz for a layer :param layer_norm: whether use layer norm in each layer Stack 1 or more hidden layers, optionally (forming an MLP) :param inputs: The fixed representation of the model :Keyword Arguments: * *hsz* -- (``int``) The number of hidden units (defaults to `100`) :return: The final layer # Output(t) = B x H x 1 # Keys = B x T x H # a = B x T x 1 # a = B x T # Want to apply over context, scaled by a # (B x 1 x T) (B x T x H) = (B x 1 x H) # a = B x T # Want to apply over context, scaled by a # (B x 
1 x T) (B x T x H) = (B x 1 x H) -> (B x H) # (B x 2H) Composite pooling allows for multiple sub-modules during pooling to be used in parallel Note, this currently requires that each submodel is an eight_mile model with an `output_dim` attr This provides an idiom for classification consisting of multiple phases In the first phase, we embed the input tensors, and subsequently pool them to a fixed width representation. Finally, we allow multiple hidden "stacking" layers, ultimately ending in a projection to the output space Wrapper layer to remove lengths from the input Wrapper for any layer that surrounds it with dropout Create a dropout wrapper around the given layer :param layer: Some sort of layer :param pdrop: A dropout value Apply the layer followed by dropout :param inputs: input tensor :return: output transformed by the held layer and subsequent dropout Wrapper for any layer that surrounds it with dropout This exists primarily for the LSTMEncoderWithState to allow dropout on the output while passing back the hidden state Create a dropout wrapper around the given layer :param layer: Some sort of layer :param pdrop: A dropout value Apply the layer followed by dropout :param inputs: input tensor :return: output transformed by the held layer and subsequent dropout Create a mask to enforce span sequence transition constraints. Returns a Tensor with valid transitions as a 0 and invalid as a 1 for easy use with `masked_fill` # dtype=unary.dtype fails, with prim_dtype error on torch 1.7.1 # r, start_idx: int, end_idx: int, norm = lambda x, y: x Do Viterbi decode on a batch. :param unary: torch.FloatTensor: [T, B, N] :param trans: torch.FloatTensor: [1, N, N] :param norm: Callable: This function should take the initial and a dim to normalize along. 
:return: torch.LongTensor: [T, B] the padded paths :return: torch.FloatTensor: [B] the path scores # Alphas: [B, 1, N] # alphas = self.norm(alphas) # This part generates a warning # Add end tag # Flip lengths # Get new best tag candidate # We are going backwards now, if flipped length was passed # these you aren't in your real results yet # Mask out the extra tags (This might be pointless given thathatt anything that # will use this as a dense tensor downstream will mask it itself?) # dtype=unary.dtype fails, with prim_dtype error on torch 1.7.1 Do Viterbi decode on a batch. :param unary: torch.FloatTensor: [T, B, N] :param trans: torch.FloatTensor: [1, N, N] :param norm: Callable: This function should take the initial and a dim to normalize along. :return: torch.LongTensor: [T, B] the padded paths :return: torch.FloatTensor: [B] the path scores # Alphas: [B, 1, N] # Add end tag # Flip lengths # Get new best tag candidate # We are going backwards now, if flipped length was passed # these you aren't in your real results yet # Mask out the extra tags (This might be pointless given that anything that # will use this as a dense tensor downstream will mask it itself?) A Greedy decoder and loss module for taggers. :param num_tags: `int` The number of output classes :param constraint_mask: `Tensor[1, N, N]` A mask with valid transitions as 1 and invalid as 0 :param batch_first: `bool` Should the batch dimensions be first? :param reduction: `str` Should the loss be calculated at the token level or batch level # FIXME: we cant do it like this if using TorchScript # If there is a constraint mask do a masked viterbi # , scores # Decoding doesn't care about batch/time first # The mask gets generated as batch first # , None Initialize the object. :param num_tags: int, The number of tags in your output (emission size) :param constraint: torch.ByteTensor, Constraints on the transitions [1, N, N] :param idxs: Tuple(int. int), The index of the start and stop symbol in emissions. 
:param batch_first: bool, if the input [B, T, ...] or [T, B, ...] Note: if idxs is none then the CRF adds these symbols to the emission vectors and n_tags is assumed to be the number of output tags. if idxs is not none then the first element is assumed to be the start index and the second idx is assumed to be the end index. In this case n_tags is assumed to include the start and end symbols. Neg Log Loss with a Batched CRF. :param unary: torch.FloatTensor: [T, B, N] or [B, T, N] :param tags: torch.LongTensor: [T, B] or [B, T] :param lengths: torch.LongTensor: [B] :return: torch.FloatTensor: [B] # Convert from [B, T, N] -> [T, B, N] Score a batch of sentences. :param unary: torch.FloatTensor: [T, B, N] :param tags: torch.LongTensor: [T, B] :param lengths: torch.LongTensor: [B] :param min_length: torch.LongTensor: [] :return: torch.FloatTensor: [B] # [N, N] # [1, B] # [T + 1, B] # Unfold gives me all slices of size 2 (this tag next tag) from dimension T # Move the pair dim to the front and split it into two # Pull out the values of the tags from the unary scores. For CRF forward on a batch. :param unary: torch.FloatTensor: [T, B, N] :param lengths: torch.LongTensor: [B] :return: torch.FloatTensor: [B] # alphas: [B, 1, N] # alphas.requires_grad = True # [1, N, N] # unary_t: [B, N] # [B, N, 1] # Broadcast alphas along the rows of trans # Broadcast trans along the batch of alphas # [B, 1, N] + [1, N, N] -> [B, N, N] # Broadcast unary_t along the cols of result # [B, N, N] + [B, N, 1] -> [B, N, N] # If we haven't reached your length zero out old alpha and take new one. # If we are past your length, zero out new_alpha and keep old one. # if self.batch_first: # forward = forward.transpose(0, 1) Do Viterbi decode on a batch. :param unary: torch.FloatTensor: [T, B, N] or [B, T, N] :param lengths: torch.LongTensor: [B] :return: torch.LongTensor: [B] the paths :return: torch.FloatTensor: [B] the path score # [1, N, N] # TODO: make this a separate model! 
# transduced = self.transducer_model(embedded) Creation function for making an nn.Embedding with the given weights :param weights: The weights to use :param finetune: Should we fine-tune the embeddings or freeze them Creates a lower triangular mask to mask future :param size: Temporal length :return: A tensor of type `uint8` that is 1s along diagonals and below, zero o.w Attention weights are applied for each value, but in a series of efficient matrix operations. In the case of self-attention, the key and query (used to create the attention weights) and values are all low order projections of the same input. :param a: The attention weights [B, H, T_q, T_k] :param values: The values [B, H, T_k, D] :returns: A tensor of shape [B, H, T_q, D] Scaled dot product attention, as defined in https://arxiv.org/abs/1706.03762 We apply the query to the keys to receive our weights via softmax in a series of efficient matrix operations. In the case of self-attention the key and query are all low order projections of the same input. :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: A tensor that is (BxHxTxT) # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k] Attention with Linear Biases, defined in https://arxiv.org/pdf/2108.12409.pdf :param query: a query for alignment. 
Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: A tensor that is (BxHxTxT) # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) # [T_q, T_k] # [H, T_q, T_k] # [1, H, T_q, T_k] # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k] Taken from https://github.com/tensorflow/mesh/blob/bbb6ce7917e2a8ef1f3dc6990fcacd4f3b075acd/mesh_tensorflow/transformer/transformer_layers.py#L1014 # now n is in the range [0, inf) Relative Attention described in https://arxiv.org/abs/1910.10683 :param query: a query for alignment. :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: A tensor that is (BxHxTxT) # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k] # [T_q, T_k] # [H, T_q, T_k] # [1, H, T_q, T_k] Taken from https://github.com/tensorflow/mesh/blob/bbb6ce7917e2a8ef1f3dc6990fcacd4f3b075acd/mesh_tensorflow/transformer/transformer_layers.py#L1014 # now n is in the range [0, inf) Relative Attention described in https://arxiv.org/abs/1910.10683 :param query: a query for alignment. :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: A tensor that is (BxHxTxT) # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) # [B, 1, 1, T_k] broadcast to [B, 1, T_q, T_k] This form of attention is specified in Shaw et al 2018: https://www.aclweb.org/anthology/N18-2074.pdf Take in a tuple of tensors corresponding to the query, key, value, edges_key, edges_value and mask variables :param q_k_v_ek_ev_m: A tuple consisting of query, key, value, `edges_key`, `edges_value` and `mask` respectively :return: An updated value Tensor Attention weights are applied for each value, but in a series of efficient matrix operations. 
In the case of self-attention, the key and query (used to create the attention weights) and values are all low order projections of the same input. :param a: The attention weights [B, H, T_q, T_k] :param value: The values [B, H, T_k, D] :param edge_value: The edge values [T_q, T_k, D] :returns: A tensor of shape [B, H, T, D] # [B, H, T_q, D] # (T_q, BxH, T_k) # (T_q, BxH, D) Scaled dot product attention, as defined in https://arxiv.org/abs/1706.03762 We apply the query to the keys to receive our weights via softmax in a series of efficient matrix operations. In the case of self-attntion the key and query are all low order projections of the same input. :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :param edges_key: a matrix of relative embeddings between each word in a sequence [T_q x T_k x D] :return: A tensor that is (B x H x T_q x T_k) # (., H, T_q, T_k) = (., H, T_q, D) x (., H, D, T_k) # [T_q, B*H, d_k] # [T_q, B*H, T_k] # [B, H, T_q, T_k] # only for cross-attention T_q != T_k. for such case, mask should be src_mask, which is a sequence_mask with # dimension [B, 1, 1, T_k], and will be broadcast to dim of scores: Unfold a tensor by applying a sliding window on a certain dimension with step 1 and padding of 0's. The window dimension is added as the last dimension :param tensor: the tensor to be unfolded, with shape [d_1, d_2, ..., T, ..., d_n] :param dim: the dimension along which unfolding is applied :param window_sz: sliding window size, need to be an odd number :return: the unfolded tensor with shape [d_1, d_2, ..., T, ..., d_n, window_sz] # torch.nn.functional.pad apply backwardly from the last dimension This class implements windowed relative attention, i.e. preventing attention beyond rpr_k. For efficiency, _attention and _update are implemented in a different way. 
Transform mask into the unfolded format. # mask is from a subsequent mask, with [1, 1, T, T] or [B, 1, T, T] # expand sequence/subsequent mask into a uniform dim # pad both sides with rpr_k, [B, 1, T, T + 2*rpr_k] # indices of a sliding window, [T, W] # [B, 1, T, W]): # mask is a sequence mask [B, 1, 1, T] # [B, 1, 1, T, W] # [B, 1, T, W] Implementation of attention considering RA masking: using torch.Tensor.unfold to create an extra dimension representing the sliding window. Then when applying matmul, Q, K, V share the same T dimension. :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :param rpr_key: tensor of the rpr_key embeddings [W, d_k] :return: A tensor that is [B, H, T, 1, W] to be matmul with values # [B, H, T, 1, d_k] # [B, H, T, d_k, W] # [1, 1, 1, d_k, W] # [B, H, T, 1, W] # [B, H, T, 1, W] # [B, 1, T, 1, W] # a has dim [B, H, T, 1, W] # [B, H, T, W, d_value] # [B, H, T, 1, d_value] # [1, 1, 1, W, d_value] # [B, H, T, 1, d_value] # [B, H, T, d_value] # [B, H, T, 1, D] + [B, H, 1, T, D] = [B, H, T, T, D] # [B, H, T, T, D] @ [D, 1] = [B, H, T, T, 1] # [B, H, T, T] Multi-headed attention from https://arxiv.org/abs/1706.03762 via http://nlp.seas.harvard.edu/2018/04/03/attention.html Multi-headed attention provides multiple looks of low-order projections K, Q and V using an attention function (specifically `scaled_dot_product_attention` in the paper. This allows multiple relationships to be illuminated via attention on different positional and representational information from each head. The number of heads `h` times the low-order projection dim `d_k` is equal to `d_model` (which is asserted upfront). This means that each weight matrix can be simply represented as a linear transformation from `d_model` to `d_model`, and partitioned into heads after the fact. 
Finally, an output projection is applied which brings the output space back to `d_model`, in preparation for the sub-sequent `FFN` sub-layer. There are 3 uses of multi-head attention in the Transformer. For encoder-decoder layers, the queries come from the previous decoder layer, and the memory keys come from the encoder. For encoder layers, the K, Q and V all come from the output of the previous layer of the encoder. And for self-attention in the decoder, K, Q and V all come from the decoder, but here it is masked to prevent using future values Constructor for multi-headed attention :param h: The number of heads :param d_model: The model hidden size :param dropout (``float``): The amount of dropout to use :param scale: Should we scale the dot product attention :param d_k: The low-order project per head. This is normally `d_model // num_heads` unless set explicitly :param ra_type: If there is an attention bias term, that will be encapsulated in the attention computation # for multi-headed attention, w_V projects to h heads, each head has dim d_k; for single headed attention, w_V # project to 1 head with dim d_model # w_O is not needed for single headed attention # TODO: pass through options # TODO: pass through options Low-order projections of query, key and value into multiple heads, then attention application and dropout :param query: a query for alignment. 
Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param value: a set of values from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: Multi-head attention output, result of attention application to sequence (B, T, d_model) # (B, H, T, D) Multi-headed relative attention from Shaw et al 2018 (https://www.aclweb.org/anthology/N18-2074.pdf) This method follows the same approach of MultiHeadedAttention, but it computes Relative Position Representations (RPR) which are used as part of the attention computations. To facilitate this, the model has its own internal embeddings lookup table, and it has an updated computation for both the attention weights and the application of those weights to follow them. Constructor for multi-headed attention :param num_heads: The number of heads :param d_model: The model hidden size :param rpr_k: distance within which relative positional embedding will be considered :param windowed_ra: whether prevent attention beyond rpr_k :param dropout (``float``): The amount of dropout to use :param scale: Should we scale the dot product attention :param d_k: The low-order project per head. This is normally `d_model // num_heads` unless set explicitly # for multi-headed attention, w_V projects to h heads, each head has dim d_k; for single headed attention, w_V # project to 1 head with dim d_model # w_O is not needed for sinlge headed attention Create a matrix shifted by self.rpr_k and bounded between 0 and 2*self.rpr_k to provide 0-based indexing for embedding # [q_len, k_len] # [q_len, k_len, d_k] Low-order projections of query, key and value into multiple heads, then attention application and dropout :param query: a query for alignment. 
Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param value: a set of values from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: Multi-head attention output, result of attention application to sequence (B, T, d_model) # key and value have the same length, but query can have a different length # (B, H, T, D) :param inputs: `(x, mask)` :return: The output tensor :param inputs: `(x, mask)` :return: The output tensor :param inputs: `(x, mask)` :return: The output tensor Spatial gating unit There are 2 ways we can look at this unit, as an MLP or a Conv with kernel length 1 l = nn.Linear(T, T) c = nn.Conv1d(T, T, 1) l(x.transpose(1, 2)).transpose(1, 2) c(x) Following https://arxiv.org/pdf/2105.08050.pdf Do gMLP forward TODO: we arent using the mask ATM :param inputs: `(x, mask)` :return: The output tensor # The shortcut here happens pretty early # A "channel" norm # A "channel" FFN # gelu according to https://arxiv.org/pdf/2105.08050.pdf # "spatial" projection (over T) # "channel" projection # transformer_type == "pre-layer-norm-before-resconn" Following https://arxiv.org/pdf/2105.08050.pdf # transformer_type == "pre-layer-norm-before-resconn" Update the length of a generated tensor based on the first EOS found. This is useful for a decoding situation where tokens after an EOS can be something other than EOS. This also makes sure that a second generated EOS doesn't affect the lengths. :param lengths: `torch.LongTensor`: The lengths where zero means an unfinished sequence. :param eoses: `torch.ByteTensor`: A mask that has 1 for sequences that generated an EOS. :param idx: `int`: What value to fill the finished lengths with (normally the current decoding timestep). :returns: `torch.Tensor`: The updated lengths tensor (same shape and type). # If a length is 0 it has never had a length set so it is eligible to have # this EOS be the length. 
# If this length can be updated AND this token is an eos Calculate a length penalty from https://arxiv.org/pdf/1609.08144.pdf The paper states the penalty as (5 + |Y|)^a / (5 + 1)^a. This is implemented as ((5 + |Y|) / 6)^a for a (very) tiny performance boost :param lengths: `torch.LongTensor`: [B, K] The lengths of the beams. :param alpha: `float`: A hyperparameter. See Table 2 for a search on this parameter. :returns: `torch.FloatTensor`: [B, K, 1] The penalties. A dummy function that returns a no penalty (1). Repeat a tensor while keeping the concept of a batch. :param t: `torch.Tensor`: The tensor to repeat. :param K: `int`: The number of times to repeat the tensor. :param dim: `int`: The dimension to repeat in. This should be the batch dimension. :returns: `torch.Tensor`: The repeated tensor. The new shape will be batch size * K at dim, the rest of the shapes will be the same. Example:: >>> a = torch.arange(10).view(2, -1) >>> a tensor([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) >>> a.repeat(2, 1) tensor([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) >>> repeat_batch(a, 2) tensor([[0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [5, 6, 7, 8, 9], [5, 6, 7, 8, 9]]) Perform batched Beam Search. Note: The paths and lengths generated do not include the <GO> token. :param encoder_outputs: `namedtuple` The outputs of the encoder class. :param init: `Callable(ecnoder_outputs: encoder_outputs, K: int)` -> Any: A callable that is called once at the start of the search to initialize things. This returns a blob that is passed to other callables. :param step: `Callable(paths: torch.LongTensor, extra) -> (probs: torch.FloatTensor, extra): A callable that is does a single decoding step. It returns the log probabilities over the vocabulary in the last dimension. It also returns any state the decoding process needs. :param update: `Callable(beams: torch.LongTensor, extra) -> extra: A callable that is called to edit the decoding state based on the selected best beams. 
:param length_penalty: `Callable(lengths: torch.LongTensor) -> torch.floatTensor A callable that generates a penalty based on the lengths. Lengths is [B, K] and the returned penalty should be [B, K, 1] (or [B, K, V] to have token based penalties?) :Keyword Arguments: * *beam* -- `int`: The number of beams to use. * *mxlen* -- `int`: The max number of steps to run the search for. :returns: tuple(preds: torch.LongTensor, lengths: torch.LongTensor, scores: torch.FloatTensor) preds: The predicted values: [B, K, max(lengths)] lengths: The length of each prediction [B, K] scores: The score of each path [B, K] # This tracks the log prob of each beam. This is distinct from score which # is based on the log prob and penalties. # Tracks the lengths of the beams, unfinished beams have lengths of zero. # [B, K, V] # This mask is for all beams that are done. # [B, K, 1] # Can creating this mask be moved out of the loop? It never changes but we don't have V # This mask selects the EOS token # This mask selects the EOS token of only the beams that are done. # Put all probability mass on the EOS token for finished beams. # Otherwise as the other beams get longer they will all give # up and eventually select this beam and all outputs become # the same. # [B, K, V] # Calculate the score of the beam based on the current length. # On the first step we only look at probabilities for the first beam. # If we don't then the probs will be the same for each beam # This means the same token will be selected for each beam # And we won't get any diversity. # Using only the first beam ensures K different starting points. # [B, K * V] # Get the log_probs of the best scoring beams # Get which beam it came from # Get the index of the word regardless of which beam it is. # Best Beam index is relative within the batch (only [0, K)). # This makes the index global (e.g. best beams for the second # batch example is in [K, 2*K)). 
# Select the paths to extend based on the best beams # Add the selected outputs to the paths # Select the lengths to keep tracking based on the valid beams left. # Updated lengths based on if we hit EOS # This runs if the loop didn't break meaning one beam hit the max len # Add an EOS to anything that hasn't hit the end. This makes the scores real. # Select the score of EOS # If any of the beams are done mask out the score of this EOS (they already had an EOS) # Slice off the Offsets.GO token # Its possible due to how its called that we might save the same checkpoint twice if we dont check first # https://github.com/kubeflow/pytorch-operator/issues/128 # https://github.com/pytorch/examples/blob/master/imagenet/main.py # In an env like k8s with kubeflow each worker will only see a single gpu # with an id of 0. If the gpu count is 1 then we are probably in an env like # that so we should just use the first (and only) gpu avaiable # This program assumes multiprocess/multi-device on a single node. Each # process gets a rank (via cli or ENV variable) and uses that rank to select # which gpu to use. This only makes sense on a single node, if you had 4 # processes on 2 nodes where each node has 2 GPUs then the ranks would be # 0, 1, 2, 3 but the gpus numbers would be node 0: 0, 1 and node 1: 0, 1 # and this assignment to gpu 3 would fail. On a single node with 4 processes # and 4 gpus the rank and gpu ids will align and this will work This is a reduction that is given Q, K, V and a mask vector. Different from base reductions, which get an embedding stack Inputs are the same as for a normal attention function, but the output here is a single tensor, ``[B, H]`` :param query: a query for alignment. 
Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param value: a set of values from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: sentence-level encoding with dim [B, d_model] Implementation of the "self_attention_head" layer from the conveRT paper (https://arxiv.org/pdf/1911.03688.pdf) :param d_model: The model hidden size :param dropout (``float``): The amount of dropout to use :param scale: should we scale the dot product attention :param d_k: The low-order project per head. This is normally `d_model // num_heads` unless set explicitly # [B, D] According to conveRT model's graph, they project token encodings to lower-dimensional query and key in single head, use them to calculate the attention score matrix that has dim [B, T, T], then sum over the query dim to get a tensor with [B, 1, T] (meaning the amount of attentions each token gets from all other tokens), scale it by sqrt of sequence lengths, then use it as the weight to weighted sum the token encoding to get the sentence encoding. we implement it in an equivalent way that can best make use of the eight_mile codes: do the matrix multiply with value first, then sum over the query dimension. :param query: a query for alignment. Can come from self in case of self-attn or decoder in case of E/D :param key: a set of keys from encoder or self :param value: a set of values from encoder or self :param mask: masking (for destination) to prevent seeing what we shouldnt :return: sentence-level encoding with dim [B, d_model] # [B, T] # (B, H, T, D), still have num_heads = 1 to use the attention function defined in eight_miles # [B, 1, T, D] # [B, T, D] A Transformer model that tries to predict if each token is real or fake This model is based on [ELECTRA: Pre-Training Text Encoders as Discriminators Rather Than Generators, Clark et al. 2019](https://openreview.net/pdf?id=r1xMH1BtvB). 
Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, C] The scores from the model. Batch First :param targets: torch.LongTensor, The labels. :returns: torch.FloatTensor, The loss. #inputs = inputs.transpose(0, 1) # Get the offsets of the non-zero targets, the values of these are all on # self.crit = LossFn(ignore_index=Offsets.PAD, reduction='elementwise-mean') Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, .., C] The scores from the model. Batch First :param targets: torch.LongTensor, The labels. :returns: torch.FloatTensor, The loss. Assigns a weight object to the layer weights. This method exists to duplicate baseline functionality across packages. :param to_layer: the pytorch layer to assign weights to :param from_layer: pytorch layer to retrieve weights from #nn.init.orthogonal_(self.weight) Args: x: ``[B, T, H]``. y: ``[B, T, H]``. Returns: ~torch.Tensor: A scoring tensor of shape ``[batch_size, n_out, seq_len, seq_len]``. If ``n_out=1``, the dimension for ``n_out`` will be squeezed automatically. 
Provide a Triplet Loss using the reversed batch for negatives # reverse the batch and use as a negative example # [B, H] # [B, H] # [B, H] # [B, H] Loss from here https://arxiv.org/pdf/1705.00652.pdf see section 4 We want to minimize the negative log prob of y given x -log P(y|x) P(y|x) P(x) = P(x, y) Chain Rule of Probability P(y|x) = P(x, y) / P(x) Algebra P(y|x) = P(x, y) / \sum_\hat(y) P(x, y = \hat(y)) Marginalize over all possible ys to get the probability of x P_approx(y|x) = P(x, y) / \sum_i^k P(x, y_k) Approximate the Marginalization by just using the ys in the batch S(x, y) is the score (cosine similarity between x and y in this case) from our neural network P(x, y) = e^S(x, y) P(y|x) = e^S(x, y) / \sum_i^k e^S(x, y_k) log P(y|x) = log( e^S(x, y) / \sum_i^k e^S(x, y_k)) log P(y|x) = S(x, y) - log \sum_i^k e^S(x, y_k) -log P(y|x) = -(S(x, y) - log \sum_i^k e^S(x, y_k)) # This is the cosine distance annealing referred to in https://arxiv.org/pdf/1911.03688.pdf # These will get broadcast to [B, B, H] # [B, 1, H] # [1, B, H] # all_scores is now a batch x batch matrix where index (i, j) is the score between # the i^th x vector and the j^th y vector # [B, B] # The diagonal has the scores of correct pair, (i, i) # vec_log_sum_exp will calculate the batched log_sum_exp in a numerically stable way # the result is a [B, 1] vector which we squeeze to make it [B] to match the diag # Because we are minimizing the negative log we turned the division into a subtraction here # Batch loss # minimize the negative loss # select hard positive and hard negative pairs Use two parallel SingleHeadReduction, and concatenate the outputs. 
It is used in the conveRT paper (https://arxiv.org/pdf/1911.03688.pdf) Two parallel 1-head self-attention, then concatenate the output :param d_model: dim of the self-attention :param dropout: dropout of the self-attention :param scale: scale fo the self-attention :param d_k: d_k of the self-attention :return: concatenation of the two 1-head attention Implementation of the FFN layer from the convert paper (https://arxiv.org/pdf/1911.03688.pdf) :param insz: input dim :param hszs: list of hidden sizes :param outsz: output dim :param pdrop: dropout of each hidden layer Abstract base for dual encoders We can assume that our dual encoder needs to end up in the same output plane between the encoders, and we can define the set of losses here that we are likely to need for most. A simple encoder where the encoders are injected and supply the `encode_query_base` and `encode_response_base` Legacy model for transformer-based dual encoder This is a dual-encoder transformer model which shares the lower layer encoder transformer sub-graph The reduction layer is attention based and takes the same input as the transformer layers. It pools the reprs Finally, the feed-forward stacks are applied via subclassing. Note that this model predates the more abstract `AbstractDualEncoder` which could accomplish the same thing by injecting the same `nn.Module` for encoder_1 and encoder_2 consisting of the transformer and reduction The attention modules originally created for DE have 3 (redundant) inputs, so use all 3 here The standard reduction modules use an input and a length The [CLS] or <s> reduction on the first token just needs the first timestep 2 Encoders (E1, E2). E1 is a Transformer followed by attention reduction. E2 is just a pooling of embeddings A CUDA timer context manager that can be used to track and record events The timer is only enabled if `MEAD_PYTORCH_TIMER` is true. If its enabled, it will cause a large slowdown (similar to `CUDA_LAUNCH_BLOCKING`). 
:param name: :param sync_before: Weight individual training examples Weight individual training examples with multiple heads Weight individual training examples Evaluate some loss over a sequence. :param inputs: torch.FloatTensor, [B, T, C] The scores from the model. Batch First :param targets: torch.LongTensor, [B, T] The labels. :param weight: sample weights [B, ] :returns: torch.FloatTensor, The loss. # [B, T]
2.453006
2
setup.py
flother/pdf-search
5
9821
<filename>setup.py<gh_stars>1-10 from setuptools import setup setup( name='espdf', version='0.1.0-dev', url='https://github.com/flother/pdf-search', py_modules=( 'espdf', ), install_requires=( 'certifi', 'elasticsearch-dsl', ), entry_points={ 'console_scripts': ( 'espdf=espdf:cli', ), }, )
<filename>setup.py<gh_stars>1-10 from setuptools import setup setup( name='espdf', version='0.1.0-dev', url='https://github.com/flother/pdf-search', py_modules=( 'espdf', ), install_requires=( 'certifi', 'elasticsearch-dsl', ), entry_points={ 'console_scripts': ( 'espdf=espdf:cli', ), }, )
none
1
1.383159
1
pywallet/network.py
martexcoin/pywallet
1
9822
<filename>pywallet/network.py class BitcoinGoldMainNet(object): """Bitcoin Gold MainNet version bytes. """ NAME = "Bitcoin Gold Main Net" COIN = "BTG" SCRIPT_ADDRESS = 0x17 # int(0x17) = 23 PUBKEY_ADDRESS = 0x26 # int(0x26) = 38 # Used to create payment addresses SECRET_KEY = 0x80 # int(0x80) = 128 # Used for WIF format EXT_PUBLIC_KEY = 0x0488b21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/0'/0'/" class BitcoinCashMainNet(object): """Bitcoin Cash MainNet version bytes.""" NAME = "Bitcoin Cash Main Net" COIN = "BCH" SCRIPT_ADDRESS = 0x28 # int(0x28) = 40 PUBKEY_ADDRESS = 0x1C # int(0x00) = 28 # Used to create payment addresses SECRET_KEY = 0x80 # int(0x80) = 128 # Used for WIF format EXT_PUBLIC_KEY = 0x0488b21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/145'/0'/" class DashMainNet(object): """Dash MainNet version bytes.""" NAME = "Dash Main Net" COIN = "DASH" SCRIPT_ADDRESS = 0x10 # int(0x10) = 16 PUBKEY_ADDRESS = 0x4C # int(0x4C) = 76 # Used to create payment addresses SECRET_KEY = 0xCC # int(0xCC) = 204 # Used for WIF format EXT_PUBLIC_KEY = 0X0488B21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0X0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/5'/0'/" class DashTestNet(object): """Dash TestNet version bytes.""" NAME = "Dash Test Net" COIN = "DASH" SCRIPT_ADDRESS = 0x13 # int(0x13) = 19 PUBKEY_ADDRESS = 0x8C # int(0x8C) = 140 # Used to create payment addresses SECRET_KEY = 0xEF # int(0xEF) = 239 # Used for WIF format EXT_PUBLIC_KEY = <KEY> # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x04358394 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/1'/0'/" class MarteXMainNet(object): """MarteX MainNet version bytes.""" NAME = "MarteX Main Net" COIN = "MXT" SCRIPT_ADDRESS = 0x05 # int(0x05) = 05 PUBKEY_ADDRESS = 0x32 # 
int(0x32) = 50 # Used to create payment addresses SECRET_KEY = 0xB2 # int(0xB2) = 178 # Used for WIF format EXT_PUBLIC_KEY = 0X0488B21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0X0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/180'/0'/" class MarteXTestNet(object): """MarteX TestNet version bytes.""" NAME = "MarteX Test Net" COIN = "MXT" SCRIPT_ADDRESS = 0xC4 # int(0xC4) = 196 PUBKEY_ADDRESS = 0x6C # int(0x6F) = 111 # Used to create payment addresses SECRET_KEY = 0x144 # int(0x144) = 324 # Used for WIF format EXT_PUBLIC_KEY = 0x043587CF # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x04358394 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/1'/0'/" class OmniMainNet(object): """Bitcoin MainNet version bytes. From https://github.com/OmniLayer/omnicore/blob/develop/src/chainparams.cpp """ NAME = "Omni Main Net" COIN = "USDT" SCRIPT_ADDRESS = 0x00 # int(0x00) = 0 PUBKEY_ADDRESS = 0x05 # int(0x05) = 5 # Used to create payment addresses SECRET_KEY = 0x80 # int(0x80) = 128 # Used for WIF format EXT_PUBLIC_KEY = 0x0488B21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/0'/0'/" class OmniTestNet(object): """Bitcoin MainNet version bytes. From https://github.com/OmniLayer/omnicore/blob/develop/src/chainparams.cpp """ NAME = "Omni Test Net" COIN = "USDT" SCRIPT_ADDRESS = 0x6f # int(0x6f) = 111 PUBKEY_ADDRESS = 0xc4 # int(0xc4) = 196 # Used to create payment addresses SECRET_KEY = 0xef # int(0xef) = 239 # Used for WIF format EXT_PUBLIC_KEY = 0x043587CF # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x04358394 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/0'/0'/" class BitcoinMainNet(object): """Bitcoin MainNet version bytes. 
From https://github.com/bitcoin/bitcoin/blob/v0.9.0rc1/src/chainparams.cpp """ NAME = "Bitcoin Main Net" COIN = "BTC" SCRIPT_ADDRESS = 0x05 # int(0x05) = 5 PUBKEY_ADDRESS = 0x00 # int(0x00) = 0 # Used to create payment addresses SECRET_KEY = 0x80 # int(0x80) = 128 # Used for WIF format EXT_PUBLIC_KEY = 0x0488B21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/0'/0'/" class FeathercoinMainNet(object): """Feathercoin MainNet version bytes. From https://github.com/FeatherCoin/Feathercoin/blob/master-0.13/src/chainparams.cpp """ NAME = "Feathercoin Main Net" COIN = "FTC" SCRIPT_ADDRESS = 0x05 # int(0x05) = 5 PUBKEY_ADDRESS = 0x0E # int(0x0E) = 14 # Used to create payment addresses SECRET_KEY = 0x8E # int(0x8E) = 142 # Used for WIF format EXT_PUBLIC_KEY = 0x0488BC26 # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488DAEE # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/4'/0'/" class BitcoinTestNet(object): """Bitcoin TestNet version bytes. 
From https://github.com/bitcoin/bitcoin/blob/v0.9.0rc1/src/chainparams.cpp """ NAME = "Bitcoin Test Net" COIN = "BTC" SCRIPT_ADDRESS = 0xc4 # int(0xc4) = 196 PUBKEY_ADDRESS = 0x6f # int(0x6f) = 111 SECRET_KEY = 0xEF # int(0xef) = 239 EXT_PUBLIC_KEY = 0x043587CF EXT_SECRET_KEY = 0x04358394 BIP32_PATH = "m/44'/1'/0'/" class LitecoinMainNet(object): """Litecoin MainNet version bytes Primary version bytes from: https://github.com/litecoin-project/litecoin/blob/master-0.8/src/base58.h Unofficial extended version bytes from https://bitcointalk.org/index.php?topic=453395.0 """ NAME = "Litecoin Main Net" COIN = "LTC" SCRIPT_ADDRESS = 0x05 # int(0x05) = 5 PUBKEY_ADDRESS = 0x30 # int(0x30) = 48 SECRET_KEY = PUBKEY_ADDRESS + 128 # = int(0xb0) = 176 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=453395.0 # EXT_PUBLIC_KEY = <KEY> # EXT_SECRET_KEY = <KEY> # same as Bitcoin's # https://github.com/ranaroussi/pywallet/issues/6 EXT_PUBLIC_KEY = 0x0488B21E EXT_SECRET_KEY = 0x0488ADE4 BIP32_PATH = "m/44'/2'/0'/" class LitecoinTestNet(object): """Litecoin TestNet version bytes Primary version bytes from: https://github.com/litecoin-project/litecoin/blob/master-0.8/src/base58.h Unofficial extended version bytes from https://bitcointalk.org/index.php?topic=453395.0 """ NAME = "Litecoin Test Net" COIN = "LTC" SCRIPT_ADDRESS = 0xc4 # int(0xc4) = 196 PUBKEY_ADDRESS = 0x6f # int(0x6f) = 111 SECRET_KEY = PUBKEY_ADDRESS + 128 # = int(0xef) = 239 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=453395.0 # EXT_PUBLIC_KEY = 0x0436f6e1 # EXT_SECRET_KEY = 0x0436ef7d # same as Bitcoin's # https://github.com/ranaroussi/pywallet/issues/6 EXT_PUBLIC_KEY = 0x043587CF EXT_SECRET_KEY = 0x04358394 BIP32_PATH = "m/44'/1'/0'/" class DogecoinMainNet(object): """Dogecoin MainNet version bytes Primary version bytes from: https://github.com/dogecoin/dogecoin/blob/1.5.2/src/base58.h Unofficial extended version bytes from 
https://bitcointalk.org/index.php?topic=409731 """ NAME = "Dogecoin Main Net" COIN = "DOGE" SCRIPT_ADDRESS = 0x16 # int(0x16) = 22 PUBKEY_ADDRESS = 0x1e # int(0x1e) = 30 SECRET_KEY = PUBKEY_ADDRESS + 128 # int(0x9e) = 158 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=409731 EXT_PUBLIC_KEY = 0x02facafd EXT_SECRET_KEY = 0x02fac398 BIP32_PATH = "m/44'/3'/0'/" class DogecoinTestNet(object): """Dogecoin TestNet version bytes Primary version bytes from: https://github.com/dogecoin/dogecoin/blob/1.5.2/src/base58.h Unofficial extended version bytes from https://bitcointalk.org/index.php?topic=409731 """ NAME = "Dogecoin Test Net" COIN = "DOGE" SCRIPT_ADDRESS = 0xc4 # int(0xc4) = 196 PUBKEY_ADDRESS = 0x71 # int(0x71) = 113 SECRET_KEY = PUBKEY_ADDRESS + 128 # int(0xf1) = 241 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=409731 EXT_PUBLIC_KEY = 0x0432a9a8 EXT_SECRET_KEY = 0x0432a243 BIP32_PATH = "m/44'/1'/0'/" class BlockCypherTestNet(object): """BlockCypher TestNet version bytes. 
From http://dev.blockcypher.com/#testing """ NAME = "BlockCypher Test Net" COIN = "BlockCypher" SCRIPT_ADDRESS = 0x1f # int(0x1f) = 31 PUBKEY_ADDRESS = 0x1b # int(0x1b) = 27 # Used to create payment addresses SECRET_KEY = 0x49 # int(0x49) = 73 # Used for WIF format EXT_PUBLIC_KEY = 0x2d413ff # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x2d40fc3 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/1'/0'/" class QtumMainNet(object): """Qtum MainNet version bytes Primary version bytes from: https://github.com/qtumproject/qtum/blob/master/src/chainparams.cpp """ NAME = "Qtum Main Net" COIN = "QTUM" SCRIPT_ADDRESS = 0x32 # int(0x32) = 50 PUBKEY_ADDRESS = 0x3A # int(0x3A) = 58 # Used to create payment addresses SECRET_KEY = 0x80 # int(0x80) = 128 # Used for WIF format EXT_PUBLIC_KEY = 0x0488B21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/88'/0'/" class QtumTestNet(object): """Qtum TestNet version bytes Primary version bytes from: https://github.com/qtumproject/qtum/blob/master/src/chainparams.cpp """ NAME = "Qtum Test Net" COIN = "QTUM" SCRIPT_ADDRESS = 0x6E # int(0x6e) = 110 PUBKEY_ADDRESS = 0x78 # int(0x78) = 120 SECRET_KEY = 0xEF # int(0xef) = 239 EXT_PUBLIC_KEY = 0x043587CF EXT_SECRET_KEY = 0x04358394 BIP32_PATH = "m/44'/88'/0'/"
<filename>pywallet/network.py class BitcoinGoldMainNet(object): """Bitcoin Gold MainNet version bytes. """ NAME = "Bitcoin Gold Main Net" COIN = "BTG" SCRIPT_ADDRESS = 0x17 # int(0x17) = 23 PUBKEY_ADDRESS = 0x26 # int(0x26) = 38 # Used to create payment addresses SECRET_KEY = 0x80 # int(0x80) = 128 # Used for WIF format EXT_PUBLIC_KEY = 0x0488b21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/0'/0'/" class BitcoinCashMainNet(object): """Bitcoin Cash MainNet version bytes.""" NAME = "Bitcoin Cash Main Net" COIN = "BCH" SCRIPT_ADDRESS = 0x28 # int(0x28) = 40 PUBKEY_ADDRESS = 0x1C # int(0x00) = 28 # Used to create payment addresses SECRET_KEY = 0x80 # int(0x80) = 128 # Used for WIF format EXT_PUBLIC_KEY = 0x0488b21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/145'/0'/" class DashMainNet(object): """Dash MainNet version bytes.""" NAME = "Dash Main Net" COIN = "DASH" SCRIPT_ADDRESS = 0x10 # int(0x10) = 16 PUBKEY_ADDRESS = 0x4C # int(0x4C) = 76 # Used to create payment addresses SECRET_KEY = 0xCC # int(0xCC) = 204 # Used for WIF format EXT_PUBLIC_KEY = 0X0488B21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0X0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/5'/0'/" class DashTestNet(object): """Dash TestNet version bytes.""" NAME = "Dash Test Net" COIN = "DASH" SCRIPT_ADDRESS = 0x13 # int(0x13) = 19 PUBKEY_ADDRESS = 0x8C # int(0x8C) = 140 # Used to create payment addresses SECRET_KEY = 0xEF # int(0xEF) = 239 # Used for WIF format EXT_PUBLIC_KEY = <KEY> # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x04358394 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/1'/0'/" class MarteXMainNet(object): """MarteX MainNet version bytes.""" NAME = "MarteX Main Net" COIN = "MXT" SCRIPT_ADDRESS = 0x05 # int(0x05) = 05 PUBKEY_ADDRESS = 0x32 # 
int(0x32) = 50 # Used to create payment addresses SECRET_KEY = 0xB2 # int(0xB2) = 178 # Used for WIF format EXT_PUBLIC_KEY = 0X0488B21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0X0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/180'/0'/" class MarteXTestNet(object): """MarteX TestNet version bytes.""" NAME = "MarteX Test Net" COIN = "MXT" SCRIPT_ADDRESS = 0xC4 # int(0xC4) = 196 PUBKEY_ADDRESS = 0x6C # int(0x6F) = 111 # Used to create payment addresses SECRET_KEY = 0x144 # int(0x144) = 324 # Used for WIF format EXT_PUBLIC_KEY = 0x043587CF # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x04358394 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/1'/0'/" class OmniMainNet(object): """Bitcoin MainNet version bytes. From https://github.com/OmniLayer/omnicore/blob/develop/src/chainparams.cpp """ NAME = "Omni Main Net" COIN = "USDT" SCRIPT_ADDRESS = 0x00 # int(0x00) = 0 PUBKEY_ADDRESS = 0x05 # int(0x05) = 5 # Used to create payment addresses SECRET_KEY = 0x80 # int(0x80) = 128 # Used for WIF format EXT_PUBLIC_KEY = 0x0488B21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/0'/0'/" class OmniTestNet(object): """Bitcoin MainNet version bytes. From https://github.com/OmniLayer/omnicore/blob/develop/src/chainparams.cpp """ NAME = "Omni Test Net" COIN = "USDT" SCRIPT_ADDRESS = 0x6f # int(0x6f) = 111 PUBKEY_ADDRESS = 0xc4 # int(0xc4) = 196 # Used to create payment addresses SECRET_KEY = 0xef # int(0xef) = 239 # Used for WIF format EXT_PUBLIC_KEY = 0x043587CF # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x04358394 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/0'/0'/" class BitcoinMainNet(object): """Bitcoin MainNet version bytes. 
From https://github.com/bitcoin/bitcoin/blob/v0.9.0rc1/src/chainparams.cpp """ NAME = "Bitcoin Main Net" COIN = "BTC" SCRIPT_ADDRESS = 0x05 # int(0x05) = 5 PUBKEY_ADDRESS = 0x00 # int(0x00) = 0 # Used to create payment addresses SECRET_KEY = 0x80 # int(0x80) = 128 # Used for WIF format EXT_PUBLIC_KEY = 0x0488B21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/0'/0'/" class FeathercoinMainNet(object): """Feathercoin MainNet version bytes. From https://github.com/FeatherCoin/Feathercoin/blob/master-0.13/src/chainparams.cpp """ NAME = "Feathercoin Main Net" COIN = "FTC" SCRIPT_ADDRESS = 0x05 # int(0x05) = 5 PUBKEY_ADDRESS = 0x0E # int(0x0E) = 14 # Used to create payment addresses SECRET_KEY = 0x8E # int(0x8E) = 142 # Used for WIF format EXT_PUBLIC_KEY = 0x0488BC26 # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488DAEE # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/4'/0'/" class BitcoinTestNet(object): """Bitcoin TestNet version bytes. 
From https://github.com/bitcoin/bitcoin/blob/v0.9.0rc1/src/chainparams.cpp """ NAME = "Bitcoin Test Net" COIN = "BTC" SCRIPT_ADDRESS = 0xc4 # int(0xc4) = 196 PUBKEY_ADDRESS = 0x6f # int(0x6f) = 111 SECRET_KEY = 0xEF # int(0xef) = 239 EXT_PUBLIC_KEY = 0x043587CF EXT_SECRET_KEY = 0x04358394 BIP32_PATH = "m/44'/1'/0'/" class LitecoinMainNet(object): """Litecoin MainNet version bytes Primary version bytes from: https://github.com/litecoin-project/litecoin/blob/master-0.8/src/base58.h Unofficial extended version bytes from https://bitcointalk.org/index.php?topic=453395.0 """ NAME = "Litecoin Main Net" COIN = "LTC" SCRIPT_ADDRESS = 0x05 # int(0x05) = 5 PUBKEY_ADDRESS = 0x30 # int(0x30) = 48 SECRET_KEY = PUBKEY_ADDRESS + 128 # = int(0xb0) = 176 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=453395.0 # EXT_PUBLIC_KEY = <KEY> # EXT_SECRET_KEY = <KEY> # same as Bitcoin's # https://github.com/ranaroussi/pywallet/issues/6 EXT_PUBLIC_KEY = 0x0488B21E EXT_SECRET_KEY = 0x0488ADE4 BIP32_PATH = "m/44'/2'/0'/" class LitecoinTestNet(object): """Litecoin TestNet version bytes Primary version bytes from: https://github.com/litecoin-project/litecoin/blob/master-0.8/src/base58.h Unofficial extended version bytes from https://bitcointalk.org/index.php?topic=453395.0 """ NAME = "Litecoin Test Net" COIN = "LTC" SCRIPT_ADDRESS = 0xc4 # int(0xc4) = 196 PUBKEY_ADDRESS = 0x6f # int(0x6f) = 111 SECRET_KEY = PUBKEY_ADDRESS + 128 # = int(0xef) = 239 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=453395.0 # EXT_PUBLIC_KEY = 0x0436f6e1 # EXT_SECRET_KEY = 0x0436ef7d # same as Bitcoin's # https://github.com/ranaroussi/pywallet/issues/6 EXT_PUBLIC_KEY = 0x043587CF EXT_SECRET_KEY = 0x04358394 BIP32_PATH = "m/44'/1'/0'/" class DogecoinMainNet(object): """Dogecoin MainNet version bytes Primary version bytes from: https://github.com/dogecoin/dogecoin/blob/1.5.2/src/base58.h Unofficial extended version bytes from 
https://bitcointalk.org/index.php?topic=409731 """ NAME = "Dogecoin Main Net" COIN = "DOGE" SCRIPT_ADDRESS = 0x16 # int(0x16) = 22 PUBKEY_ADDRESS = 0x1e # int(0x1e) = 30 SECRET_KEY = PUBKEY_ADDRESS + 128 # int(0x9e) = 158 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=409731 EXT_PUBLIC_KEY = 0x02facafd EXT_SECRET_KEY = 0x02fac398 BIP32_PATH = "m/44'/3'/0'/" class DogecoinTestNet(object): """Dogecoin TestNet version bytes Primary version bytes from: https://github.com/dogecoin/dogecoin/blob/1.5.2/src/base58.h Unofficial extended version bytes from https://bitcointalk.org/index.php?topic=409731 """ NAME = "Dogecoin Test Net" COIN = "DOGE" SCRIPT_ADDRESS = 0xc4 # int(0xc4) = 196 PUBKEY_ADDRESS = 0x71 # int(0x71) = 113 SECRET_KEY = PUBKEY_ADDRESS + 128 # int(0xf1) = 241 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=409731 EXT_PUBLIC_KEY = 0x0432a9a8 EXT_SECRET_KEY = 0x0432a243 BIP32_PATH = "m/44'/1'/0'/" class BlockCypherTestNet(object): """BlockCypher TestNet version bytes. 
From http://dev.blockcypher.com/#testing """ NAME = "BlockCypher Test Net" COIN = "BlockCypher" SCRIPT_ADDRESS = 0x1f # int(0x1f) = 31 PUBKEY_ADDRESS = 0x1b # int(0x1b) = 27 # Used to create payment addresses SECRET_KEY = 0x49 # int(0x49) = 73 # Used for WIF format EXT_PUBLIC_KEY = 0x2d413ff # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x2d40fc3 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/1'/0'/" class QtumMainNet(object): """Qtum MainNet version bytes Primary version bytes from: https://github.com/qtumproject/qtum/blob/master/src/chainparams.cpp """ NAME = "Qtum Main Net" COIN = "QTUM" SCRIPT_ADDRESS = 0x32 # int(0x32) = 50 PUBKEY_ADDRESS = 0x3A # int(0x3A) = 58 # Used to create payment addresses SECRET_KEY = 0x80 # int(0x80) = 128 # Used for WIF format EXT_PUBLIC_KEY = 0x0488B21E # Used to serialize public BIP32 addresses EXT_SECRET_KEY = 0x0488ADE4 # Used to serialize private BIP32 addresses BIP32_PATH = "m/44'/88'/0'/" class QtumTestNet(object): """Qtum TestNet version bytes Primary version bytes from: https://github.com/qtumproject/qtum/blob/master/src/chainparams.cpp """ NAME = "Qtum Test Net" COIN = "QTUM" SCRIPT_ADDRESS = 0x6E # int(0x6e) = 110 PUBKEY_ADDRESS = 0x78 # int(0x78) = 120 SECRET_KEY = 0xEF # int(0xef) = 239 EXT_PUBLIC_KEY = 0x043587CF EXT_SECRET_KEY = 0x04358394 BIP32_PATH = "m/44'/88'/0'/"
en
0.687791
Bitcoin Gold MainNet version bytes. # int(0x17) = 23 # int(0x26) = 38 # Used to create payment addresses # int(0x80) = 128 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses Bitcoin Cash MainNet version bytes. # int(0x28) = 40 # int(0x00) = 28 # Used to create payment addresses # int(0x80) = 128 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses Dash MainNet version bytes. # int(0x10) = 16 # int(0x4C) = 76 # Used to create payment addresses # int(0xCC) = 204 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses Dash TestNet version bytes. # int(0x13) = 19 # int(0x8C) = 140 # Used to create payment addresses # int(0xEF) = 239 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses MarteX MainNet version bytes. # int(0x05) = 05 # int(0x32) = 50 # Used to create payment addresses # int(0xB2) = 178 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses MarteX TestNet version bytes. # int(0xC4) = 196 # int(0x6F) = 111 # Used to create payment addresses # int(0x144) = 324 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses Bitcoin MainNet version bytes. From https://github.com/OmniLayer/omnicore/blob/develop/src/chainparams.cpp # int(0x00) = 0 # int(0x05) = 5 # Used to create payment addresses # int(0x80) = 128 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses Bitcoin MainNet version bytes. 
From https://github.com/OmniLayer/omnicore/blob/develop/src/chainparams.cpp # int(0x6f) = 111 # int(0xc4) = 196 # Used to create payment addresses # int(0xef) = 239 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses Bitcoin MainNet version bytes. From https://github.com/bitcoin/bitcoin/blob/v0.9.0rc1/src/chainparams.cpp # int(0x05) = 5 # int(0x00) = 0 # Used to create payment addresses # int(0x80) = 128 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses Feathercoin MainNet version bytes. From https://github.com/FeatherCoin/Feathercoin/blob/master-0.13/src/chainparams.cpp # int(0x05) = 5 # int(0x0E) = 14 # Used to create payment addresses # int(0x8E) = 142 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses Bitcoin TestNet version bytes. From https://github.com/bitcoin/bitcoin/blob/v0.9.0rc1/src/chainparams.cpp # int(0xc4) = 196 # int(0x6f) = 111 # int(0xef) = 239 Litecoin MainNet version bytes Primary version bytes from: https://github.com/litecoin-project/litecoin/blob/master-0.8/src/base58.h Unofficial extended version bytes from https://bitcointalk.org/index.php?topic=453395.0 # int(0x05) = 5 # int(0x30) = 48 # = int(0xb0) = 176 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=453395.0 # EXT_PUBLIC_KEY = <KEY> # EXT_SECRET_KEY = <KEY> # same as Bitcoin's # https://github.com/ranaroussi/pywallet/issues/6 Litecoin TestNet version bytes Primary version bytes from: https://github.com/litecoin-project/litecoin/blob/master-0.8/src/base58.h Unofficial extended version bytes from https://bitcointalk.org/index.php?topic=453395.0 # int(0xc4) = 196 # int(0x6f) = 111 # = int(0xef) = 239 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=453395.0 # EXT_PUBLIC_KEY = 0x0436f6e1 # EXT_SECRET_KEY = 0x0436ef7d # same as Bitcoin's # 
https://github.com/ranaroussi/pywallet/issues/6 Dogecoin MainNet version bytes Primary version bytes from: https://github.com/dogecoin/dogecoin/blob/1.5.2/src/base58.h Unofficial extended version bytes from https://bitcointalk.org/index.php?topic=409731 # int(0x16) = 22 # int(0x1e) = 30 # int(0x9e) = 158 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=409731 Dogecoin TestNet version bytes Primary version bytes from: https://github.com/dogecoin/dogecoin/blob/1.5.2/src/base58.h Unofficial extended version bytes from https://bitcointalk.org/index.php?topic=409731 # int(0xc4) = 196 # int(0x71) = 113 # int(0xf1) = 241 # Unofficial extended version bytes taken from # https://bitcointalk.org/index.php?topic=409731 BlockCypher TestNet version bytes. From http://dev.blockcypher.com/#testing # int(0x1f) = 31 # int(0x1b) = 27 # Used to create payment addresses # int(0x49) = 73 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses Qtum MainNet version bytes Primary version bytes from: https://github.com/qtumproject/qtum/blob/master/src/chainparams.cpp # int(0x32) = 50 # int(0x3A) = 58 # Used to create payment addresses # int(0x80) = 128 # Used for WIF format # Used to serialize public BIP32 addresses # Used to serialize private BIP32 addresses Qtum TestNet version bytes Primary version bytes from: https://github.com/qtumproject/qtum/blob/master/src/chainparams.cpp # int(0x6e) = 110 # int(0x78) = 120 # int(0xef) = 239
2.86083
3
conanfile.py
sintef-ocean/conan-clapack
0
9823
#!/usr/bin/env python # -*- coding: utf-8 -*- from conans import ConanFile, CMake, tools import shutil class ClapackConan(ConanFile): name = "clapack" version = "3.2.1" license = "BSD 3-Clause" # BSD-3-Clause-Clear url = "https://github.com/sintef-ocean/conan-clapack" author = "<NAME>" homepage = "http://www.netlib.org/clapack/" description = \ "CLAPACK's goal is to provide LAPACK for someone who does " \ "not have access to a Fortran compiler" topics = ("clapack", "LAPACK", "Port to C", "Numerical linear algebra") settings = "os", "compiler", "build_type", "arch" options = { "fPIC": [True, False], } default_options = { "fPIC": True, } generators = ("cmake_paths", "cmake_find_package") exports = ["patch/*"] source_file = "clapack-{}-CMAKE.tgz".format(version) source_subfolder = source_file[:-4] build_subfolder = "build_subfolder" def source(self): link = "http://www.netlib.org/clapack/" + self.source_file tools.get(link, sha1="5ea1bcc4314e392bca8b9e5f61d44355cf9f4cc1") tools.patch(patch_file="patch/MainCMakeLists.patch", base_path=self.source_subfolder) tools.patch(patch_file="patch/SRC_CMakeLists.patch", base_path=self.source_subfolder) tools.patch(patch_file="patch/F2C_CMakeLists.patch", base_path=self.source_subfolder) tools.patch(patch_file="patch/BLAS_CMakeLists.patch", base_path=self.source_subfolder) shutil.move(self.source_subfolder + "/COPYING", self.source_subfolder + "/LICENSE") def build(self): cmake = CMake(self) if self.settings.os != "Windows": cmake.definitions['CMAKE_POSITION_INDEPENDENT_CODE'] = self.options.fPIC cmake.configure(source_folder=self.source_subfolder, build_folder=self.build_subfolder) cmake.build() cmake.install() def package(self): self.copy("COPYING", dst="licenses", src=self.source_subfolder, ignore_case=True, keep_path=False) def package_info(self): self.cpp_info.name = 'CLAPACK' if self.settings.compiler == "Visual Studio": self.cpp_info.libs = ["libf2c", "blas", "lapack"] if self.settings.build_type == "Debug": for i in 
range(len(self.cpp_info.libs)): self.cpp_info.libs[i] += 'd' else: self.cpp_info.libs = ["lapack", "blas", "f2c"] def config_options(self): if self.settings.os == "Windows": del self.options.fPIC def configure(self): del self.settings.compiler.libcxx
#!/usr/bin/env python # -*- coding: utf-8 -*- from conans import ConanFile, CMake, tools import shutil class ClapackConan(ConanFile): name = "clapack" version = "3.2.1" license = "BSD 3-Clause" # BSD-3-Clause-Clear url = "https://github.com/sintef-ocean/conan-clapack" author = "<NAME>" homepage = "http://www.netlib.org/clapack/" description = \ "CLAPACK's goal is to provide LAPACK for someone who does " \ "not have access to a Fortran compiler" topics = ("clapack", "LAPACK", "Port to C", "Numerical linear algebra") settings = "os", "compiler", "build_type", "arch" options = { "fPIC": [True, False], } default_options = { "fPIC": True, } generators = ("cmake_paths", "cmake_find_package") exports = ["patch/*"] source_file = "clapack-{}-CMAKE.tgz".format(version) source_subfolder = source_file[:-4] build_subfolder = "build_subfolder" def source(self): link = "http://www.netlib.org/clapack/" + self.source_file tools.get(link, sha1="5ea1bcc4314e392bca8b9e5f61d44355cf9f4cc1") tools.patch(patch_file="patch/MainCMakeLists.patch", base_path=self.source_subfolder) tools.patch(patch_file="patch/SRC_CMakeLists.patch", base_path=self.source_subfolder) tools.patch(patch_file="patch/F2C_CMakeLists.patch", base_path=self.source_subfolder) tools.patch(patch_file="patch/BLAS_CMakeLists.patch", base_path=self.source_subfolder) shutil.move(self.source_subfolder + "/COPYING", self.source_subfolder + "/LICENSE") def build(self): cmake = CMake(self) if self.settings.os != "Windows": cmake.definitions['CMAKE_POSITION_INDEPENDENT_CODE'] = self.options.fPIC cmake.configure(source_folder=self.source_subfolder, build_folder=self.build_subfolder) cmake.build() cmake.install() def package(self): self.copy("COPYING", dst="licenses", src=self.source_subfolder, ignore_case=True, keep_path=False) def package_info(self): self.cpp_info.name = 'CLAPACK' if self.settings.compiler == "Visual Studio": self.cpp_info.libs = ["libf2c", "blas", "lapack"] if self.settings.build_type == "Debug": for i in 
range(len(self.cpp_info.libs)): self.cpp_info.libs[i] += 'd' else: self.cpp_info.libs = ["lapack", "blas", "f2c"] def config_options(self): if self.settings.os == "Windows": del self.options.fPIC def configure(self): del self.settings.compiler.libcxx
en
0.37357
#!/usr/bin/env python # -*- coding: utf-8 -*- # BSD-3-Clause-Clear
1.792409
2
yacos/algorithm/metaheuristics.py
ComputerSystemsLaboratory/YaCoS
8
9824
""" Copyright 2021 <NAME>. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os from dataclasses import dataclass import pygmo as pg from yacos.essential import Sequence from yacos.essential import IO from yacos.essential import Engine class Pygmo: """A Pygmo's strategy.""" __version__ = '1.0.0' __flags = None # {key: {'goal': float, # 'seq': list}} __results = None # SGA # {gen = {'fevals': int, # 'best': float, # 'improvement': float}} # # PSO # {gen: {'fevals': int, # 'gbest': float, # 'meanvel': float, # 'meanlbest': float, # 'avgdist': float} __log = None class Problem: """Pygmo's problem.""" def __init__(self, first_key, last_key, passes_dict, dimension, goal, compiler, benchmark_directory, working_set, times, tool, verify_output): """Construct a Pygmo problem. Parameters ---------- first_key : int The index of the first pass. last_key : int The index of the last pass. passes_dict : dict The dictionary with the available passes. dimension : int The length of a sequence. goal : str compiler : str benchmark_directory : str working_set : int times: int tool: str Execution tool verify_output: bool The goal is valid only if the execution status is OK. 
""" self.first_key = first_key self.last_key = last_key self.passes_dict = passes_dict self.dimension = dimension self.goal = goal self.compiler = compiler self.benchmark_directory = benchmark_directory self.working_set = working_set self.times = times self.tool = tool self.verify_output = verify_output def __deepcopy__(self, *args, **kwargs): """Deeep copy.""" return self def fitness(self, sequence): """Calculate and return the fitness.""" sequence = Sequence.fix_index(list(sequence)) sequence = Sequence.sanitize(sequence) sequence = Sequence.index_pass_to_list(sequence, self.passes_dict) goal_value = Engine.evaluate(self.goal, Sequence.name_pass_to_string( sequence ), self.compiler, self.benchmark_directory, self.working_set, self.times, self.tool, self.verify_output) return [goal_value] def get_nix(self): """Integer dimension of the problem.""" return self.dimension def get_bounds(self): """Box-bounds.""" return ([self.first_key] * self.dimension, [self.last_key] * self.dimension) def get_name(self): """Problem name.""" return 'Optimization Selection' def get_extra_info(self): """Info.""" return '\tDimensions: ' + str(self.dimension) @dataclass class PygmoFlags: """Pygmo flags. Parameters ---------- first_key : int The index of the first pass. last_key : int The index of the last pass. passes_dict : dict The dictionary with the available passes. dimension : int The length of a sequence. population : int goals : dict compiler : str benchmarks_directory : str working_set : int The dataset to execute the benchmark. times: int Execution times tool : str Execution tool verify_output: bool The goal is valid only if the execution status is OK. 
""" first_key: int last_key: int passes_dict: dict dimension: int population: int goals: dict compiler: str benchmarks_directory: str working_set: int times: int tool: str verify_output: bool def __init__(self, dimension, population, passes_filename, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output): """Initialize the arguments. Parameters ---------- dimension : int The length of a sequence. population : int passes_filename : str The file that describes the passes to use. goals : dict compiler : str benchmarks_directory : str working_set : int The dataset to execute the benchmark. times: int Execution times tool: str Execution tool verify_output: bool The goal is valid only if the execution status is OK. """ first_key, last_key, passes_dict = IO.load_passes(passes_filename) # When the goal is obtained during compile time # and the working set is not defined during compilation, # we do not need the working set. self.__flags = self.PygmoFlags(first_key, last_key, passes_dict, dimension, population, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output) @property def results(self): """Getter.""" return self.__results @property def log(self): """Getter.""" return self.__log def exec(self, algorithm, benchmark): """Execute the algorithm. 
Parameter --------- algorithm : Pygmo algorithm benchmark : str """ # Step 1: Algorithm algorithm = pg.algorithm(algorithm) # algorithm.set_verbosity(1) # Step 2: Instantiate a pygmo problem index = benchmark.find('.') # Benchmark directtory bench_dir = os.path.join(self.__flags.benchmarks_directory, benchmark[:index], benchmark[index+1:]) problem = self.Problem(self.__flags.first_key, self.__flags.last_key, self.__flags.passes_dict, self.__flags.dimension, self.__flags.goals, self.__flags.compiler, bench_dir, self.__flags.working_set, self.__flags.times, self.__flags.tool, self.__flags.verify_output) problem = pg.problem(problem) # Step 3: The initial population population = pg.population(problem, self.__flags.population) # Step 4: Evolve the population population = algorithm.evolve(population) # Step 5: Get the results sga_sequence = population.get_x().tolist() sga_fitness = population.get_f().tolist() self.__results = {} for index in range(self.__flags.population): sequence = Sequence.index_pass_to_list(sga_sequence[index], self.__flags.passes_dict) goal_value = sga_fitness[index][0] if goal_value == float('inf'): continue self.__results[index] = {'seq': sequence, 'goal': goal_value} # Step 6: Get the log self.__log = {} if algorithm.get_name() == 'SGA: Genetic Algorithm': uda = algorithm.extract(pg.sga) log = uda.get_log() for (gen, fevals, best, improvement) in log: self.__log[gen] = {'fevals': fevals, 'best': best, 'improvement': improvement} elif algorithm.get_name() == 'PSO: Particle Swarm Optimization': uda = algorithm.extract(pg.pso) log = uda.get_log() for (gen, fevals, gbest, meanvel, meanlbest, avgdist) in log: self.__log[gen] = {'fevals': fevals, 'gbest': gbest, 'meanvel': meanvel, 'meanlbest': meanlbest, 'avgdist': avgdist} class SGA(Pygmo): """Simple Genetic Algorithm.""" __version__ = '1.0.0' __flags = None @dataclass class Flags: """Pygmo flags. 
Parameters ---------- generations : int cr : float Crossover probability m : float Mutation probability param_m : float Distribution index (polynomial mutation), gaussian width (gaussian mutation) or inactive (uniform mutation) param_s : float The number of best individuals to use in “truncated” selection or the size of the tournament in tournament selection. crossover : str exponential, binomial or single mutation : str gaussian, polynomial or uniform selection : str tournament or truncated seed : int """ generations: int cr: float m: float param_m: float param_s: float crossover: str mutation: str selection: str seed: int def __init__(self, generations, population, cr, m, param_m, param_s, crossover, mutation, selection, seed, dimension, passes_filename, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output): """Initialize a SGA object. Parameters ---------- generations : int population : int cr : float Crossover probability m : float Mutation probability param_m : float Distribution index (polynomial mutation), gaussian width (gaussian mutation) or inactive (uniform mutation) param_s : float The number of best individuals to use in “truncated” selection or the size of the tournament in tournament selection. crossover : str exponential, binomial or single mutation : str gaussian, polynomial or uniform selection : str tournament or truncated seed : int dimension : int The length of a sequence. passes_filename : str The file that describes the passes to use. goals : dict compiler : str benchmarks_directory : str working_set : int The dataset to execute the benchmark. times : int Execution times tool : str Execution tool verify_output: bool The goal is valid only if the execution status is OK. 
""" self.__flags = self.Flags(generations, cr, m, param_m, param_s, crossover, mutation, selection, seed) super().__init__(dimension, population, passes_filename, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output) def run(self, benchmark): """Execute the algorithm. Parameter -------- benchmark: str """ if self.__flags.seed is None: algorithm = pg.sga(gen=self.__flags.generations, cr=self.__flags.cr, m=self.__flags.m, param_m=self.__flags.param_m, param_s=self.__flags.param_s, crossover=self.__flags.crossover, mutation=self.__flags.mutation, selection=self.__flags.selection) else: algorithm = pg.sga(gen=self.__flags.generations, cr=self.__flags.cr, m=self.__flags.m, param_m=self.__flags.param_m, param_s=self.__flags.param_s, crossover=self.__flags.crossover, mutation=self.__flags.mutation, selection=self.__flags.selection, seed=self.__flags.seed) # Execute super().exec(algorithm, benchmark) class PSO(Pygmo): """Particle Swarm Optimization.""" __version__ = '1.0.0' __flags = None @dataclass class Flags: """PSO flags. Parameters ---------- generations : int omega : float Inertia weight (or constriction factor) eta1 : float Social component eta2 : float Cognitive component max_vel : float Maximum allowed particle velocities (normalized with respect to the bounds width) variant : int Algorithmic variant neighb_type : int Swarm topology (defining each particle’s neighbours) neighb_param : int Topology parameter (defines how many neighbours to consider) memory : bool When true the velocities are not reset between successive calls to the evolve method seed : int Seed used by the internal random number generator. 
""" generations: int omega: float eta1: float eta2: float max_vel: float variant: int neighb_type: int neighb_param: int memory: bool seed: int def __init__(self, generations, population, omega, eta1, eta2, max_vel, variant, neighb_type, neighb_param, memory, seed, dimension, passes_filename, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output): """Initialize a PSO object. Parameters ---------- generations : int population : int omega : float Inertia weight (or constriction factor) eta1 : float Social component eta2 : float Cognitive component max_vel : float Maximum allowed particle velocities (normalized with respect to the bounds width) variant : int Algorithmic variant neighb_type : int Swarm topology (defining each particle’s neighbours) neighb_param : int Topology parameter (defines how many neighbours to consider) memory : bool When true the velocities are not reset between successive calls to the evolve method seed : int Seed used by the internal random number generator. """ self.__flags = self.Flags(generations, omega, eta1, eta2, max_vel, variant, neighb_type, neighb_param, memory, seed) super().__init__(dimension, population, passes_filename, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output) def run(self, benchmark): """Execute the algorithm. Parameter -------- benchmark : str """ if self.__flags.seed: algorithm = pg.pso(self.__flags.generations, self.__flags.omega, self.__flags.eta1, self.__flags.eta2, self.__flags.max_vel, self.__flags.variant, self.__flags.neighb_type, self.__flags.neighb_param, self.__flags.memory, self.__flags.seed) else: algorithm = pg.pso(self.__flags.generations, self.__flags.omega, self.__flags.eta1, self.__flags.eta2, self.__flags.max_vel, self.__flags.variant, self.__flags.neighb_type, self.__flags.neighb_param, self.__flags.memory) # Execute super().exec(algorithm, benchmark)
""" Copyright 2021 <NAME>. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os from dataclasses import dataclass import pygmo as pg from yacos.essential import Sequence from yacos.essential import IO from yacos.essential import Engine class Pygmo: """A Pygmo's strategy.""" __version__ = '1.0.0' __flags = None # {key: {'goal': float, # 'seq': list}} __results = None # SGA # {gen = {'fevals': int, # 'best': float, # 'improvement': float}} # # PSO # {gen: {'fevals': int, # 'gbest': float, # 'meanvel': float, # 'meanlbest': float, # 'avgdist': float} __log = None class Problem: """Pygmo's problem.""" def __init__(self, first_key, last_key, passes_dict, dimension, goal, compiler, benchmark_directory, working_set, times, tool, verify_output): """Construct a Pygmo problem. Parameters ---------- first_key : int The index of the first pass. last_key : int The index of the last pass. passes_dict : dict The dictionary with the available passes. dimension : int The length of a sequence. goal : str compiler : str benchmark_directory : str working_set : int times: int tool: str Execution tool verify_output: bool The goal is valid only if the execution status is OK. 
""" self.first_key = first_key self.last_key = last_key self.passes_dict = passes_dict self.dimension = dimension self.goal = goal self.compiler = compiler self.benchmark_directory = benchmark_directory self.working_set = working_set self.times = times self.tool = tool self.verify_output = verify_output def __deepcopy__(self, *args, **kwargs): """Deeep copy.""" return self def fitness(self, sequence): """Calculate and return the fitness.""" sequence = Sequence.fix_index(list(sequence)) sequence = Sequence.sanitize(sequence) sequence = Sequence.index_pass_to_list(sequence, self.passes_dict) goal_value = Engine.evaluate(self.goal, Sequence.name_pass_to_string( sequence ), self.compiler, self.benchmark_directory, self.working_set, self.times, self.tool, self.verify_output) return [goal_value] def get_nix(self): """Integer dimension of the problem.""" return self.dimension def get_bounds(self): """Box-bounds.""" return ([self.first_key] * self.dimension, [self.last_key] * self.dimension) def get_name(self): """Problem name.""" return 'Optimization Selection' def get_extra_info(self): """Info.""" return '\tDimensions: ' + str(self.dimension) @dataclass class PygmoFlags: """Pygmo flags. Parameters ---------- first_key : int The index of the first pass. last_key : int The index of the last pass. passes_dict : dict The dictionary with the available passes. dimension : int The length of a sequence. population : int goals : dict compiler : str benchmarks_directory : str working_set : int The dataset to execute the benchmark. times: int Execution times tool : str Execution tool verify_output: bool The goal is valid only if the execution status is OK. 
""" first_key: int last_key: int passes_dict: dict dimension: int population: int goals: dict compiler: str benchmarks_directory: str working_set: int times: int tool: str verify_output: bool def __init__(self, dimension, population, passes_filename, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output): """Initialize the arguments. Parameters ---------- dimension : int The length of a sequence. population : int passes_filename : str The file that describes the passes to use. goals : dict compiler : str benchmarks_directory : str working_set : int The dataset to execute the benchmark. times: int Execution times tool: str Execution tool verify_output: bool The goal is valid only if the execution status is OK. """ first_key, last_key, passes_dict = IO.load_passes(passes_filename) # When the goal is obtained during compile time # and the working set is not defined during compilation, # we do not need the working set. self.__flags = self.PygmoFlags(first_key, last_key, passes_dict, dimension, population, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output) @property def results(self): """Getter.""" return self.__results @property def log(self): """Getter.""" return self.__log def exec(self, algorithm, benchmark): """Execute the algorithm. 
Parameter --------- algorithm : Pygmo algorithm benchmark : str """ # Step 1: Algorithm algorithm = pg.algorithm(algorithm) # algorithm.set_verbosity(1) # Step 2: Instantiate a pygmo problem index = benchmark.find('.') # Benchmark directtory bench_dir = os.path.join(self.__flags.benchmarks_directory, benchmark[:index], benchmark[index+1:]) problem = self.Problem(self.__flags.first_key, self.__flags.last_key, self.__flags.passes_dict, self.__flags.dimension, self.__flags.goals, self.__flags.compiler, bench_dir, self.__flags.working_set, self.__flags.times, self.__flags.tool, self.__flags.verify_output) problem = pg.problem(problem) # Step 3: The initial population population = pg.population(problem, self.__flags.population) # Step 4: Evolve the population population = algorithm.evolve(population) # Step 5: Get the results sga_sequence = population.get_x().tolist() sga_fitness = population.get_f().tolist() self.__results = {} for index in range(self.__flags.population): sequence = Sequence.index_pass_to_list(sga_sequence[index], self.__flags.passes_dict) goal_value = sga_fitness[index][0] if goal_value == float('inf'): continue self.__results[index] = {'seq': sequence, 'goal': goal_value} # Step 6: Get the log self.__log = {} if algorithm.get_name() == 'SGA: Genetic Algorithm': uda = algorithm.extract(pg.sga) log = uda.get_log() for (gen, fevals, best, improvement) in log: self.__log[gen] = {'fevals': fevals, 'best': best, 'improvement': improvement} elif algorithm.get_name() == 'PSO: Particle Swarm Optimization': uda = algorithm.extract(pg.pso) log = uda.get_log() for (gen, fevals, gbest, meanvel, meanlbest, avgdist) in log: self.__log[gen] = {'fevals': fevals, 'gbest': gbest, 'meanvel': meanvel, 'meanlbest': meanlbest, 'avgdist': avgdist} class SGA(Pygmo): """Simple Genetic Algorithm.""" __version__ = '1.0.0' __flags = None @dataclass class Flags: """Pygmo flags. 
Parameters ---------- generations : int cr : float Crossover probability m : float Mutation probability param_m : float Distribution index (polynomial mutation), gaussian width (gaussian mutation) or inactive (uniform mutation) param_s : float The number of best individuals to use in “truncated” selection or the size of the tournament in tournament selection. crossover : str exponential, binomial or single mutation : str gaussian, polynomial or uniform selection : str tournament or truncated seed : int """ generations: int cr: float m: float param_m: float param_s: float crossover: str mutation: str selection: str seed: int def __init__(self, generations, population, cr, m, param_m, param_s, crossover, mutation, selection, seed, dimension, passes_filename, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output): """Initialize a SGA object. Parameters ---------- generations : int population : int cr : float Crossover probability m : float Mutation probability param_m : float Distribution index (polynomial mutation), gaussian width (gaussian mutation) or inactive (uniform mutation) param_s : float The number of best individuals to use in “truncated” selection or the size of the tournament in tournament selection. crossover : str exponential, binomial or single mutation : str gaussian, polynomial or uniform selection : str tournament or truncated seed : int dimension : int The length of a sequence. passes_filename : str The file that describes the passes to use. goals : dict compiler : str benchmarks_directory : str working_set : int The dataset to execute the benchmark. times : int Execution times tool : str Execution tool verify_output: bool The goal is valid only if the execution status is OK. 
""" self.__flags = self.Flags(generations, cr, m, param_m, param_s, crossover, mutation, selection, seed) super().__init__(dimension, population, passes_filename, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output) def run(self, benchmark): """Execute the algorithm. Parameter -------- benchmark: str """ if self.__flags.seed is None: algorithm = pg.sga(gen=self.__flags.generations, cr=self.__flags.cr, m=self.__flags.m, param_m=self.__flags.param_m, param_s=self.__flags.param_s, crossover=self.__flags.crossover, mutation=self.__flags.mutation, selection=self.__flags.selection) else: algorithm = pg.sga(gen=self.__flags.generations, cr=self.__flags.cr, m=self.__flags.m, param_m=self.__flags.param_m, param_s=self.__flags.param_s, crossover=self.__flags.crossover, mutation=self.__flags.mutation, selection=self.__flags.selection, seed=self.__flags.seed) # Execute super().exec(algorithm, benchmark) class PSO(Pygmo): """Particle Swarm Optimization.""" __version__ = '1.0.0' __flags = None @dataclass class Flags: """PSO flags. Parameters ---------- generations : int omega : float Inertia weight (or constriction factor) eta1 : float Social component eta2 : float Cognitive component max_vel : float Maximum allowed particle velocities (normalized with respect to the bounds width) variant : int Algorithmic variant neighb_type : int Swarm topology (defining each particle’s neighbours) neighb_param : int Topology parameter (defines how many neighbours to consider) memory : bool When true the velocities are not reset between successive calls to the evolve method seed : int Seed used by the internal random number generator. 
""" generations: int omega: float eta1: float eta2: float max_vel: float variant: int neighb_type: int neighb_param: int memory: bool seed: int def __init__(self, generations, population, omega, eta1, eta2, max_vel, variant, neighb_type, neighb_param, memory, seed, dimension, passes_filename, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output): """Initialize a PSO object. Parameters ---------- generations : int population : int omega : float Inertia weight (or constriction factor) eta1 : float Social component eta2 : float Cognitive component max_vel : float Maximum allowed particle velocities (normalized with respect to the bounds width) variant : int Algorithmic variant neighb_type : int Swarm topology (defining each particle’s neighbours) neighb_param : int Topology parameter (defines how many neighbours to consider) memory : bool When true the velocities are not reset between successive calls to the evolve method seed : int Seed used by the internal random number generator. """ self.__flags = self.Flags(generations, omega, eta1, eta2, max_vel, variant, neighb_type, neighb_param, memory, seed) super().__init__(dimension, population, passes_filename, goals, compiler, benchmarks_directory, working_set, times, tool, verify_output) def run(self, benchmark): """Execute the algorithm. Parameter -------- benchmark : str """ if self.__flags.seed: algorithm = pg.pso(self.__flags.generations, self.__flags.omega, self.__flags.eta1, self.__flags.eta2, self.__flags.max_vel, self.__flags.variant, self.__flags.neighb_type, self.__flags.neighb_param, self.__flags.memory, self.__flags.seed) else: algorithm = pg.pso(self.__flags.generations, self.__flags.omega, self.__flags.eta1, self.__flags.eta2, self.__flags.max_vel, self.__flags.variant, self.__flags.neighb_type, self.__flags.neighb_param, self.__flags.memory) # Execute super().exec(algorithm, benchmark)
en
0.644459
Copyright 2021 <NAME>. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. A Pygmo's strategy. # {key: {'goal': float, # 'seq': list}} # SGA # {gen = {'fevals': int, # 'best': float, # 'improvement': float}} # # PSO # {gen: {'fevals': int, # 'gbest': float, # 'meanvel': float, # 'meanlbest': float, # 'avgdist': float} Pygmo's problem. Construct a Pygmo problem. Parameters ---------- first_key : int The index of the first pass. last_key : int The index of the last pass. passes_dict : dict The dictionary with the available passes. dimension : int The length of a sequence. goal : str compiler : str benchmark_directory : str working_set : int times: int tool: str Execution tool verify_output: bool The goal is valid only if the execution status is OK. Deeep copy. Calculate and return the fitness. Integer dimension of the problem. Box-bounds. Problem name. Info. Pygmo flags. Parameters ---------- first_key : int The index of the first pass. last_key : int The index of the last pass. passes_dict : dict The dictionary with the available passes. dimension : int The length of a sequence. population : int goals : dict compiler : str benchmarks_directory : str working_set : int The dataset to execute the benchmark. times: int Execution times tool : str Execution tool verify_output: bool The goal is valid only if the execution status is OK. Initialize the arguments. Parameters ---------- dimension : int The length of a sequence. population : int passes_filename : str The file that describes the passes to use. 
goals : dict compiler : str benchmarks_directory : str working_set : int The dataset to execute the benchmark. times: int Execution times tool: str Execution tool verify_output: bool The goal is valid only if the execution status is OK. # When the goal is obtained during compile time # and the working set is not defined during compilation, # we do not need the working set. Getter. Getter. Execute the algorithm. Parameter --------- algorithm : Pygmo algorithm benchmark : str # Step 1: Algorithm # algorithm.set_verbosity(1) # Step 2: Instantiate a pygmo problem # Benchmark directtory # Step 3: The initial population # Step 4: Evolve the population # Step 5: Get the results # Step 6: Get the log Simple Genetic Algorithm. Pygmo flags. Parameters ---------- generations : int cr : float Crossover probability m : float Mutation probability param_m : float Distribution index (polynomial mutation), gaussian width (gaussian mutation) or inactive (uniform mutation) param_s : float The number of best individuals to use in “truncated” selection or the size of the tournament in tournament selection. crossover : str exponential, binomial or single mutation : str gaussian, polynomial or uniform selection : str tournament or truncated seed : int Initialize a SGA object. Parameters ---------- generations : int population : int cr : float Crossover probability m : float Mutation probability param_m : float Distribution index (polynomial mutation), gaussian width (gaussian mutation) or inactive (uniform mutation) param_s : float The number of best individuals to use in “truncated” selection or the size of the tournament in tournament selection. crossover : str exponential, binomial or single mutation : str gaussian, polynomial or uniform selection : str tournament or truncated seed : int dimension : int The length of a sequence. passes_filename : str The file that describes the passes to use. 
goals : dict compiler : str benchmarks_directory : str working_set : int The dataset to execute the benchmark. times : int Execution times tool : str Execution tool verify_output: bool The goal is valid only if the execution status is OK. Execute the algorithm. Parameter -------- benchmark: str # Execute Particle Swarm Optimization. PSO flags. Parameters ---------- generations : int omega : float Inertia weight (or constriction factor) eta1 : float Social component eta2 : float Cognitive component max_vel : float Maximum allowed particle velocities (normalized with respect to the bounds width) variant : int Algorithmic variant neighb_type : int Swarm topology (defining each particle’s neighbours) neighb_param : int Topology parameter (defines how many neighbours to consider) memory : bool When true the velocities are not reset between successive calls to the evolve method seed : int Seed used by the internal random number generator. Initialize a PSO object. Parameters ---------- generations : int population : int omega : float Inertia weight (or constriction factor) eta1 : float Social component eta2 : float Cognitive component max_vel : float Maximum allowed particle velocities (normalized with respect to the bounds width) variant : int Algorithmic variant neighb_type : int Swarm topology (defining each particle’s neighbours) neighb_param : int Topology parameter (defines how many neighbours to consider) memory : bool When true the velocities are not reset between successive calls to the evolve method seed : int Seed used by the internal random number generator. Execute the algorithm. Parameter -------- benchmark : str # Execute
2.490748
2
lab_03/main.py
solnishko-pvs/Modeling_BMSTU
0
9825
import tkinter as tk from scipy.stats import chi2, chisquare COLOR = '#dddddd' COLUMNS_COLOR = '#ffffff' MAX_SIZE = 10 WIDGET_WIDTH = 25 class LinearCongruent: m = 2**32 a = 1664525 c = 1013904223 _cur = 1 def next(self): self._cur = (self.a * self._cur + self.c) % self.m return self._cur def khi_krit(arr): min_ = min(arr) cnt = [0 for _ in range(max(arr) - min_ + 1)] for elem in arr: cnt[elem-min_] += 1 n = sum(cnt) k = len(cnt) p = 1 / k chisq = 0 for j in range(k): chisq += cnt[j]**2 / p chisq = chisq / n - n #print(chisquare(cnt)) return (1 - chi2.cdf(chisq, k)) * 100 def get_10_nums(arr, num): cnt = 0 res = [] i = 0 while cnt != 10: if arr[i] > num: res.append(arr[i]) cnt += 1 i += 1 return res class file_nums: def __init__(self): self.nums = None with open('nums.txt', 'r') as f: nums = [list(i.split()) for i in list(f.read().split('\n'))] self.columns = len(nums) self.rows = len(nums[0]) self.nums = [[] for _ in range(self.rows)] for i in range(self.columns): for j in range(self.rows): self.nums[j].append(nums[i][j]) self.cur_x = 0 self.cur_y = 0 def next(self): self.cur_x += 1 if self.cur_x == self.columns: self.cur_x = 0 self.cur_y += 1 if self.cur_y == self.rows: self.cur_y = 0 return self.nums[self.cur_y][self.cur_x] class Block: def __init__(self, master): self.frame = tk.LabelFrame(master, bg=COLOR, text='Ввод данных', width=480, height=110) self.frame.columnconfigure(0, weight=1) self.frame.rowconfigure(0, weight=1) self.frame.grid_propagate(False) self.label_input = tk.Label(self.frame, text='Ваши числа: ', bg=COLOR) self.entry_numbers = tk.Entry(self.frame, width=WIDGET_WIDTH+10) self.calculate_custom_result_btn = tk.Button(self.frame, text="Статистика хи-квадрат ваших чисел: ", width=WIDGET_WIDTH+6, bg=COLOR, command=self.user_solve) self.label_result = tk.Label(self.frame, text='', bg=COLOR) self.calculate_result_btn = tk.Button(self.frame, text="Вычислить для 1000 чисел", width=WIDGET_WIDTH, bg=COLOR, command=self.solve) self.listbox_frame = 
tk.LabelFrame(master, text='Матрица', bg=COLOR, width=530, height=200) self.listbox_frame.grid_propagate(False) self.result_frame = tk.LabelFrame(master, bg=COLOR, text='Результат', width=510, height=270) self.result_frame.grid_propagate(False) self.table_label = tk.Label(self.result_frame, text='Табличный способ', bg=COLOR, bd=3) self.algorithm_label = tk.Label(self.result_frame, text='Алгоритмический способ', bg=COLOR, bd=3) self.one_digit_table = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.two_digit_table = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.three_digit_table = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.one_digit_algorithm = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.two_digit_algorithm = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.three_digit_algorithm = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.one_digit_table.insert(tk.END, '1 разряд') self.two_digit_table.insert(tk.END, '2 разряда') self.three_digit_table.insert(tk.END, '3 разряда') self.one_digit_algorithm.insert(tk.END, '1 разряд') self.two_digit_algorithm.insert(tk.END, '2 разряда') self.three_digit_algorithm.insert(tk.END, '3 разряда') self.label_khi = tk.Label(self.result_frame, text='% статистики хи-квадрат', bg=COLOR, bd=3) self.one_digit_table_khi = tk.Label(self.result_frame, text='', bg=COLOR, bd=3) self.two_digit_table_khi = tk.Label(self.result_frame, text='', bg=COLOR, bd=3) self.three_digit_table_khi = tk.Label(self.result_frame, text='', bg=COLOR, bd=3) self.one_digit_algorithm_khi = tk.Label(self.result_frame, text='', bg=COLOR, bd=3) self.two_digit_algorithm_khi = tk.Label(self.result_frame, text='', bg=COLOR, bd=3) self.three_digit_algorithm_khi = tk.Label(self.result_frame, 
text='', bg=COLOR, bd=3) self.table_label.grid(row=0, column=0, columnspan=3) self.algorithm_label.grid(row=0, column=3, columnspan=3) self.one_digit_table.grid(row=1, column=0, padx=1) self.two_digit_table.grid(row=1, column=1, padx=1) self.three_digit_table.grid(row=1, column=2, padx=1) self.one_digit_algorithm.grid(row=1, column=3, padx=1) self.two_digit_algorithm.grid(row=1, column=4, padx=1) self.three_digit_algorithm.grid(row=1, column=5, padx=1) self.one_digit_table_khi.grid(row=3, column=0, padx=1) self.two_digit_table_khi.grid(row=3, column=1, padx=1) self.three_digit_table_khi.grid(row=3, column=2, padx=1) self.one_digit_algorithm_khi.grid(row=3, column=3, padx=1) self.two_digit_algorithm_khi.grid(row=3, column=4, padx=1) self.three_digit_algorithm_khi.grid(row=3, column=5, padx=1) self.label_khi.grid(row=2, column=0, columnspan=6) self.label_input.grid(row=0, column=0) self.entry_numbers.grid(row=0, column=1, padx=10) self.calculate_custom_result_btn.grid(row=1, column=0, pady=4) self.label_result.grid(row=1, column=1) self.calculate_result_btn.grid(row=2, column=0, columnspan=2, pady=2) self.data = None self.size = None self.table_gen = file_nums() self.listbox_list = [tk.Listbox(self.listbox_frame, selectmode=tk.SINGLE, width=8, bg=COLOR) for _ in range(MAX_SIZE)] def defocus(self, event): event.widget.master.focus_set() def make_view(self): self.frame.pack() #self.listbox_frame.pack() self.result_frame.pack() def fill_data(self, size): for i in range(size): for j in range(size): self.listbox_list[i].insert(tk.END, self.data[j, i]) def user_solve(self): inp = self.entry_numbers.get() try: x = list(map(int, inp.split())) self.label_result['text'] = str(round(khi_krit(x), 4)) + '%' except: self.label_result['text'] = 'Ошибка ввода!!!' 
def solve(self): alg_arrs = [[int(generator.next()) % j for _ in range(1000)] for j in [10, 100, 1000]] table_arrs = [[int(self.table_gen.next()[:j]) for _ in range(1000)] for j in [1, 2, 3]] self.one_digit_algorithm.delete(1, tk.END) self.two_digit_algorithm.delete(1, tk.END) self.three_digit_algorithm.delete(1, tk.END) self.one_digit_algorithm['height'] = 11 self.two_digit_algorithm['height'] = 11 self.three_digit_algorithm['height'] = 11 self.one_digit_table.delete(1, tk.END) self.two_digit_table.delete(1, tk.END) self.three_digit_table.delete(1, tk.END) self.one_digit_table['height'] = 11 self.two_digit_table['height'] = 11 self.three_digit_table['height'] = 11 [self.one_digit_algorithm.insert(tk.END, i) for i in get_10_nums(alg_arrs[0], -1)] [self.two_digit_algorithm.insert(tk.END, i) for i in get_10_nums(alg_arrs[1], 9)] [self.three_digit_algorithm.insert(tk.END, i) for i in get_10_nums(alg_arrs[2], 99)] [self.one_digit_table.insert(tk.END, i) for i in get_10_nums(table_arrs[0], -1)] [self.two_digit_table.insert(tk.END, i) for i in get_10_nums(table_arrs[1], 9)] [self.three_digit_table.insert(tk.END, i) for i in get_10_nums(table_arrs[2], 99)] self.one_digit_algorithm_khi['text'] = str(round(khi_krit(alg_arrs[0]), 4)) + '%' self.two_digit_algorithm_khi['text'] = str(round(khi_krit(alg_arrs[1]), 4)) + '%' self.three_digit_algorithm_khi['text'] = str(round(khi_krit(alg_arrs[2]), 4)) + '%' self.one_digit_table_khi['text'] = str(round(khi_krit(table_arrs[0]), 4)) + '%' self.two_digit_table_khi['text'] = str(round(khi_krit(table_arrs[1]), 4)) + '%' self.three_digit_table_khi['text'] = str(round(khi_krit(table_arrs[2]), 4)) + '%' generator = LinearCongruent() root = tk.Tk() root['bg'] = COLOR root.geometry('540x390') first_block = Block(root) first_block.make_view() root.mainloop()
import tkinter as tk from scipy.stats import chi2, chisquare COLOR = '#dddddd' COLUMNS_COLOR = '#ffffff' MAX_SIZE = 10 WIDGET_WIDTH = 25 class LinearCongruent: m = 2**32 a = 1664525 c = 1013904223 _cur = 1 def next(self): self._cur = (self.a * self._cur + self.c) % self.m return self._cur def khi_krit(arr): min_ = min(arr) cnt = [0 for _ in range(max(arr) - min_ + 1)] for elem in arr: cnt[elem-min_] += 1 n = sum(cnt) k = len(cnt) p = 1 / k chisq = 0 for j in range(k): chisq += cnt[j]**2 / p chisq = chisq / n - n #print(chisquare(cnt)) return (1 - chi2.cdf(chisq, k)) * 100 def get_10_nums(arr, num): cnt = 0 res = [] i = 0 while cnt != 10: if arr[i] > num: res.append(arr[i]) cnt += 1 i += 1 return res class file_nums: def __init__(self): self.nums = None with open('nums.txt', 'r') as f: nums = [list(i.split()) for i in list(f.read().split('\n'))] self.columns = len(nums) self.rows = len(nums[0]) self.nums = [[] for _ in range(self.rows)] for i in range(self.columns): for j in range(self.rows): self.nums[j].append(nums[i][j]) self.cur_x = 0 self.cur_y = 0 def next(self): self.cur_x += 1 if self.cur_x == self.columns: self.cur_x = 0 self.cur_y += 1 if self.cur_y == self.rows: self.cur_y = 0 return self.nums[self.cur_y][self.cur_x] class Block: def __init__(self, master): self.frame = tk.LabelFrame(master, bg=COLOR, text='Ввод данных', width=480, height=110) self.frame.columnconfigure(0, weight=1) self.frame.rowconfigure(0, weight=1) self.frame.grid_propagate(False) self.label_input = tk.Label(self.frame, text='Ваши числа: ', bg=COLOR) self.entry_numbers = tk.Entry(self.frame, width=WIDGET_WIDTH+10) self.calculate_custom_result_btn = tk.Button(self.frame, text="Статистика хи-квадрат ваших чисел: ", width=WIDGET_WIDTH+6, bg=COLOR, command=self.user_solve) self.label_result = tk.Label(self.frame, text='', bg=COLOR) self.calculate_result_btn = tk.Button(self.frame, text="Вычислить для 1000 чисел", width=WIDGET_WIDTH, bg=COLOR, command=self.solve) self.listbox_frame = 
tk.LabelFrame(master, text='Матрица', bg=COLOR, width=530, height=200) self.listbox_frame.grid_propagate(False) self.result_frame = tk.LabelFrame(master, bg=COLOR, text='Результат', width=510, height=270) self.result_frame.grid_propagate(False) self.table_label = tk.Label(self.result_frame, text='Табличный способ', bg=COLOR, bd=3) self.algorithm_label = tk.Label(self.result_frame, text='Алгоритмический способ', bg=COLOR, bd=3) self.one_digit_table = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.two_digit_table = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.three_digit_table = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.one_digit_algorithm = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.two_digit_algorithm = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.three_digit_algorithm = tk.Listbox(self.result_frame, selectmode=tk.SINGLE, width=13, bg=COLUMNS_COLOR, height=1) self.one_digit_table.insert(tk.END, '1 разряд') self.two_digit_table.insert(tk.END, '2 разряда') self.three_digit_table.insert(tk.END, '3 разряда') self.one_digit_algorithm.insert(tk.END, '1 разряд') self.two_digit_algorithm.insert(tk.END, '2 разряда') self.three_digit_algorithm.insert(tk.END, '3 разряда') self.label_khi = tk.Label(self.result_frame, text='% статистики хи-квадрат', bg=COLOR, bd=3) self.one_digit_table_khi = tk.Label(self.result_frame, text='', bg=COLOR, bd=3) self.two_digit_table_khi = tk.Label(self.result_frame, text='', bg=COLOR, bd=3) self.three_digit_table_khi = tk.Label(self.result_frame, text='', bg=COLOR, bd=3) self.one_digit_algorithm_khi = tk.Label(self.result_frame, text='', bg=COLOR, bd=3) self.two_digit_algorithm_khi = tk.Label(self.result_frame, text='', bg=COLOR, bd=3) self.three_digit_algorithm_khi = tk.Label(self.result_frame, 
text='', bg=COLOR, bd=3) self.table_label.grid(row=0, column=0, columnspan=3) self.algorithm_label.grid(row=0, column=3, columnspan=3) self.one_digit_table.grid(row=1, column=0, padx=1) self.two_digit_table.grid(row=1, column=1, padx=1) self.three_digit_table.grid(row=1, column=2, padx=1) self.one_digit_algorithm.grid(row=1, column=3, padx=1) self.two_digit_algorithm.grid(row=1, column=4, padx=1) self.three_digit_algorithm.grid(row=1, column=5, padx=1) self.one_digit_table_khi.grid(row=3, column=0, padx=1) self.two_digit_table_khi.grid(row=3, column=1, padx=1) self.three_digit_table_khi.grid(row=3, column=2, padx=1) self.one_digit_algorithm_khi.grid(row=3, column=3, padx=1) self.two_digit_algorithm_khi.grid(row=3, column=4, padx=1) self.three_digit_algorithm_khi.grid(row=3, column=5, padx=1) self.label_khi.grid(row=2, column=0, columnspan=6) self.label_input.grid(row=0, column=0) self.entry_numbers.grid(row=0, column=1, padx=10) self.calculate_custom_result_btn.grid(row=1, column=0, pady=4) self.label_result.grid(row=1, column=1) self.calculate_result_btn.grid(row=2, column=0, columnspan=2, pady=2) self.data = None self.size = None self.table_gen = file_nums() self.listbox_list = [tk.Listbox(self.listbox_frame, selectmode=tk.SINGLE, width=8, bg=COLOR) for _ in range(MAX_SIZE)] def defocus(self, event): event.widget.master.focus_set() def make_view(self): self.frame.pack() #self.listbox_frame.pack() self.result_frame.pack() def fill_data(self, size): for i in range(size): for j in range(size): self.listbox_list[i].insert(tk.END, self.data[j, i]) def user_solve(self): inp = self.entry_numbers.get() try: x = list(map(int, inp.split())) self.label_result['text'] = str(round(khi_krit(x), 4)) + '%' except: self.label_result['text'] = 'Ошибка ввода!!!' 
def solve(self): alg_arrs = [[int(generator.next()) % j for _ in range(1000)] for j in [10, 100, 1000]] table_arrs = [[int(self.table_gen.next()[:j]) for _ in range(1000)] for j in [1, 2, 3]] self.one_digit_algorithm.delete(1, tk.END) self.two_digit_algorithm.delete(1, tk.END) self.three_digit_algorithm.delete(1, tk.END) self.one_digit_algorithm['height'] = 11 self.two_digit_algorithm['height'] = 11 self.three_digit_algorithm['height'] = 11 self.one_digit_table.delete(1, tk.END) self.two_digit_table.delete(1, tk.END) self.three_digit_table.delete(1, tk.END) self.one_digit_table['height'] = 11 self.two_digit_table['height'] = 11 self.three_digit_table['height'] = 11 [self.one_digit_algorithm.insert(tk.END, i) for i in get_10_nums(alg_arrs[0], -1)] [self.two_digit_algorithm.insert(tk.END, i) for i in get_10_nums(alg_arrs[1], 9)] [self.three_digit_algorithm.insert(tk.END, i) for i in get_10_nums(alg_arrs[2], 99)] [self.one_digit_table.insert(tk.END, i) for i in get_10_nums(table_arrs[0], -1)] [self.two_digit_table.insert(tk.END, i) for i in get_10_nums(table_arrs[1], 9)] [self.three_digit_table.insert(tk.END, i) for i in get_10_nums(table_arrs[2], 99)] self.one_digit_algorithm_khi['text'] = str(round(khi_krit(alg_arrs[0]), 4)) + '%' self.two_digit_algorithm_khi['text'] = str(round(khi_krit(alg_arrs[1]), 4)) + '%' self.three_digit_algorithm_khi['text'] = str(round(khi_krit(alg_arrs[2]), 4)) + '%' self.one_digit_table_khi['text'] = str(round(khi_krit(table_arrs[0]), 4)) + '%' self.two_digit_table_khi['text'] = str(round(khi_krit(table_arrs[1]), 4)) + '%' self.three_digit_table_khi['text'] = str(round(khi_krit(table_arrs[2]), 4)) + '%' generator = LinearCongruent() root = tk.Tk() root['bg'] = COLOR root.geometry('540x390') first_block = Block(root) first_block.make_view() root.mainloop()
en
0.191331
#print(chisquare(cnt)) #self.listbox_frame.pack()
2.752201
3
VacationPy/api_keys.py
tylermneher/python-api-challenge
0
9826
# OpenWeatherMap API Key weather_api_key = "MyOpenWeatherMapAPIKey" # Google API Key g_key = "MyGoogleKey"
# OpenWeatherMap API Key weather_api_key = "MyOpenWeatherMapAPIKey" # Google API Key g_key = "MyGoogleKey"
en
0.293972
# OpenWeatherMap API Key # Google API Key
1.271795
1
aries_cloudagent/protocols/actionmenu/v1_0/messages/menu_request.py
panickervinod/aries-cloudagent-python
0
9827
"""Represents a request for an action menu.""" from .....messaging.agent_message import AgentMessage, AgentMessageSchema from ..message_types import MENU_REQUEST, PROTOCOL_PACKAGE HANDLER_CLASS = f"{PROTOCOL_PACKAGE}.handlers.menu_request_handler.MenuRequestHandler" class MenuRequest(AgentMessage): """Class representing a request for an action menu.""" class Meta: """Metadata for action menu request.""" handler_class = HANDLER_CLASS message_type = MENU_REQUEST schema_class = "MenuRequestSchema" def __init__(self, **kwargs): """Initialize a menu request object.""" super().__init__(**kwargs) class MenuRequestSchema(AgentMessageSchema): """MenuRequest schema class.""" class Meta: """MenuRequest schema metadata.""" model_class = MenuRequest
"""Represents a request for an action menu.""" from .....messaging.agent_message import AgentMessage, AgentMessageSchema from ..message_types import MENU_REQUEST, PROTOCOL_PACKAGE HANDLER_CLASS = f"{PROTOCOL_PACKAGE}.handlers.menu_request_handler.MenuRequestHandler" class MenuRequest(AgentMessage): """Class representing a request for an action menu.""" class Meta: """Metadata for action menu request.""" handler_class = HANDLER_CLASS message_type = MENU_REQUEST schema_class = "MenuRequestSchema" def __init__(self, **kwargs): """Initialize a menu request object.""" super().__init__(**kwargs) class MenuRequestSchema(AgentMessageSchema): """MenuRequest schema class.""" class Meta: """MenuRequest schema metadata.""" model_class = MenuRequest
en
0.502618
Represents a request for an action menu. Class representing a request for an action menu. Metadata for action menu request. Initialize a menu request object. MenuRequest schema class. MenuRequest schema metadata.
2.192493
2
porthole/management/commands/brocade.py
jsayles/Porthole
0
9828
<filename>porthole/management/commands/brocade.py from django.core.management.base import BaseCommand, CommandError from django.conf import settings from porthole import models, brocade class Command(BaseCommand): help = "Command the Brocade switch stacks" args = "" requires_system_checks = False def add_arguments(self, parser): parser.add_argument( '--print_stacks', action='store_true', dest='print_stacks', help='Show the VLAN data from all switch stacks', ) def handle(self, *args, **options): if options['print_stacks']: self.print_stacks() def print_stacks(self): for s in models.SwitchStack.objects.all(): stack = brocade.SwitchStack(s.name, s.ip_address, s.raw_username, s.raw_password, port=s.port) stack.print_stack() print()
<filename>porthole/management/commands/brocade.py from django.core.management.base import BaseCommand, CommandError from django.conf import settings from porthole import models, brocade class Command(BaseCommand): help = "Command the Brocade switch stacks" args = "" requires_system_checks = False def add_arguments(self, parser): parser.add_argument( '--print_stacks', action='store_true', dest='print_stacks', help='Show the VLAN data from all switch stacks', ) def handle(self, *args, **options): if options['print_stacks']: self.print_stacks() def print_stacks(self): for s in models.SwitchStack.objects.all(): stack = brocade.SwitchStack(s.name, s.ip_address, s.raw_username, s.raw_password, port=s.port) stack.print_stack() print()
none
1
2.100889
2
joulia/unit_conversions_test.py
willjschmitt/joulia-webserver
0
9829
"""Tests joulia.unit_conversions. """ from django.test import TestCase from joulia import unit_conversions class GramsToPoundsTest(TestCase): def test_grams_to_pounds(self): self.assertEquals(unit_conversions.grams_to_pounds(1000.0), 2.20462) class GramsToOuncesTest(TestCase): def test_grams_to_ounces(self): self.assertEquals(unit_conversions.grams_to_ounces(1000.0), 35.27392)
"""Tests joulia.unit_conversions. """ from django.test import TestCase from joulia import unit_conversions class GramsToPoundsTest(TestCase): def test_grams_to_pounds(self): self.assertEquals(unit_conversions.grams_to_pounds(1000.0), 2.20462) class GramsToOuncesTest(TestCase): def test_grams_to_ounces(self): self.assertEquals(unit_conversions.grams_to_ounces(1000.0), 35.27392)
en
0.318507
Tests joulia.unit_conversions.
2.341977
2
Django/blog/tests.py
zarif007/Blog-site
1
9830
<filename>Django/blog/tests.py<gh_stars>1-10 from django.contrib.auth.models import User from django.test import TestCase from blog.models import Category, Post class Test_Create_Post(TestCase): @classmethod def setUpTestData(cls): test_category = Category.objects.create(name='django') testuser1 = User.objects.create_user( username='test-123', password='<PASSWORD>' ) test_post = Post.objects.create(category_id=1, title='Post', excerpt='Excerpt', content='Content', slug='Slug', author_id=1, status='published') def test_blog_contenet(self): post = Post.postobjects.get(id=1) cat = Category.objects.get(id=1) author = f'{post.author}' excerpt = f'{post.excerpt}' title = f'{post.title}' content = f'{post.content}' status = f'{post.status}' self.assertEqual(author, 'test-123') self.assertEqual(title, 'Post') self.assertEqual(content, 'Content') self.assertEqual(status, 'published') self.assertEqual(str(post), 'Post') self.assertEqual(str(cat), 'django')
<filename>Django/blog/tests.py<gh_stars>1-10 from django.contrib.auth.models import User from django.test import TestCase from blog.models import Category, Post class Test_Create_Post(TestCase): @classmethod def setUpTestData(cls): test_category = Category.objects.create(name='django') testuser1 = User.objects.create_user( username='test-123', password='<PASSWORD>' ) test_post = Post.objects.create(category_id=1, title='Post', excerpt='Excerpt', content='Content', slug='Slug', author_id=1, status='published') def test_blog_contenet(self): post = Post.postobjects.get(id=1) cat = Category.objects.get(id=1) author = f'{post.author}' excerpt = f'{post.excerpt}' title = f'{post.title}' content = f'{post.content}' status = f'{post.status}' self.assertEqual(author, 'test-123') self.assertEqual(title, 'Post') self.assertEqual(content, 'Content') self.assertEqual(status, 'published') self.assertEqual(str(post), 'Post') self.assertEqual(str(cat), 'django')
none
1
2.406366
2
scripts/train_presets/beads.py
kreshuklab/hylfm-net
8
9831
from pathlib import Path from hylfm.hylfm_types import ( CriterionChoice, DatasetChoice, LRSchedThresMode, LRSchedulerChoice, MetricChoice, OptimizerChoice, PeriodUnit, ) from hylfm.model import HyLFM_Net from hylfm.train import train if __name__ == "__main__": train( dataset=DatasetChoice.beads_highc_b, batch_multiplier=2, batch_size=1, crit_apply_weight_above_threshold=False, crit_beta=1.0, crit_decay_weight_by=0.8, crit_decay_weight_every_unit=PeriodUnit.epoch, crit_decay_weight_every_value=1, crit_decay_weight_limit=1.0, crit_ms_ssim_weight=0.01, crit_threshold=0.5, crit_weight=0.001, criterion=CriterionChoice.WeightedSmoothL1, data_range=1.0, eval_batch_size=1, interpolation_order=2, lr_sched_factor=0.5, lr_sched_patience=10, lr_sched_thres=0.0001, lr_sched_thres_mode=LRSchedThresMode.abs, lr_scheduler=LRSchedulerChoice.ReduceLROnPlateau, max_epochs=10, model_weights=None, # Path() opt_lr=3e-4, opt_momentum=0.0, opt_weight_decay=0.0, optimizer=OptimizerChoice.Adam, patience=5, score_metric=MetricChoice.MS_SSIM, seed=None, validate_every_unit=PeriodUnit.epoch, validate_every_value=1, win_sigma=1.5, win_size=11, # model nnum=19, z_out=51, kernel2d=3, c00_2d=976, c01_2d=976, c02_2d=0, c03_2d=0, c04_2d=0, up0_2d=488, c10_2d=488, c11_2d=0, c12_2d=0, c13_2d=0, c14_2d=0, up1_2d=244, c20_2d=244, c21_2d=0, c22_2d=0, c23_2d=0, c24_2d=0, up2_2d=0, c30_2d=0, c31_2d=0, c32_2d=0, c33_2d=0, c34_2d=0, last_kernel2d=1, cin_3d=7, kernel3d=3, c00_3d=7, c01_3d=0, c02_3d=0, c03_3d=0, c04_3d=0, up0_3d=7, c10_3d=7, c11_3d=7, c12_3d=0, c13_3d=0, c14_3d=0, up1_3d=0, c20_3d=0, c21_3d=0, c22_3d=0, c23_3d=0, c24_3d=0, up2_3d=0, c30_3d=0, c31_3d=0, c32_3d=0, c33_3d=0, c34_3d=0, init_fn=HyLFM_Net.InitName.xavier_uniform_, final_activation=None, )
from pathlib import Path from hylfm.hylfm_types import ( CriterionChoice, DatasetChoice, LRSchedThresMode, LRSchedulerChoice, MetricChoice, OptimizerChoice, PeriodUnit, ) from hylfm.model import HyLFM_Net from hylfm.train import train if __name__ == "__main__": train( dataset=DatasetChoice.beads_highc_b, batch_multiplier=2, batch_size=1, crit_apply_weight_above_threshold=False, crit_beta=1.0, crit_decay_weight_by=0.8, crit_decay_weight_every_unit=PeriodUnit.epoch, crit_decay_weight_every_value=1, crit_decay_weight_limit=1.0, crit_ms_ssim_weight=0.01, crit_threshold=0.5, crit_weight=0.001, criterion=CriterionChoice.WeightedSmoothL1, data_range=1.0, eval_batch_size=1, interpolation_order=2, lr_sched_factor=0.5, lr_sched_patience=10, lr_sched_thres=0.0001, lr_sched_thres_mode=LRSchedThresMode.abs, lr_scheduler=LRSchedulerChoice.ReduceLROnPlateau, max_epochs=10, model_weights=None, # Path() opt_lr=3e-4, opt_momentum=0.0, opt_weight_decay=0.0, optimizer=OptimizerChoice.Adam, patience=5, score_metric=MetricChoice.MS_SSIM, seed=None, validate_every_unit=PeriodUnit.epoch, validate_every_value=1, win_sigma=1.5, win_size=11, # model nnum=19, z_out=51, kernel2d=3, c00_2d=976, c01_2d=976, c02_2d=0, c03_2d=0, c04_2d=0, up0_2d=488, c10_2d=488, c11_2d=0, c12_2d=0, c13_2d=0, c14_2d=0, up1_2d=244, c20_2d=244, c21_2d=0, c22_2d=0, c23_2d=0, c24_2d=0, up2_2d=0, c30_2d=0, c31_2d=0, c32_2d=0, c33_2d=0, c34_2d=0, last_kernel2d=1, cin_3d=7, kernel3d=3, c00_3d=7, c01_3d=0, c02_3d=0, c03_3d=0, c04_3d=0, up0_3d=7, c10_3d=7, c11_3d=7, c12_3d=0, c13_3d=0, c14_3d=0, up1_3d=0, c20_3d=0, c21_3d=0, c22_3d=0, c23_3d=0, c24_3d=0, up2_3d=0, c30_3d=0, c31_3d=0, c32_3d=0, c33_3d=0, c34_3d=0, init_fn=HyLFM_Net.InitName.xavier_uniform_, final_activation=None, )
en
0.604203
# Path() # model
1.849243
2
TrainingPreprocess/filtered_to_dataset.py
CsekM8/LVH-THESIS
0
9832
import os import pickle from PIL import Image class PatientToImageFolder: def __init__(self, sourceFolder): self.sourceFolder = sourceFolder # How many patient with contrast SA for each pathology (used for classification) self.contrastSApathologyDict = {} # How many patient with contrast LA for each pathology (used for classification) self.contrastCH2pathologyDict = {} self.contrastCH3pathologyDict = {} self.contrastCH4pathologyDict = {} # How many patient with SA image (used for autoencoder training) self.totalSaImagePatientNum = 0 self.curSaImagePatientNum = 0 # How many patient with LA image (used for autoencoder training) self.totalCH2ImagePatientNum = 0 self.curCH2ImagePatientNum = 0 self.totalCH3ImagePatientNum = 0 self.curCH3ImagePatientNum = 0 self.totalCH4ImagePatientNum = 0 self.curCH4ImagePatientNum = 0 self.curContrastSaImagePatientNum = {} self.curContrastCH2ImagePatientNum = {} self.curContrastCH3ImagePatientNum = {} self.curContrastCH4ImagePatientNum = {} self.collectInfo() def collectInfo(self): for file in os.listdir(self.sourceFolder): if ".p" in file: tmpPat = pickle.load(open(os.path.join(self.sourceFolder, file), 'rb')) patho = tmpPat.pathology.strip() if "U18" in patho or "sport" in patho or "Normal" in patho: continue # elif "sport" in patho: # patho = "Sport" # elif "Normal" not in patho and "HCM" not in patho: # patho = "Other" if tmpPat.normalSaImages is not None: self.totalSaImagePatientNum += 1 if (tmpPat.contrastSaImages is not None and tmpPat.contrastLaImages.ch2Images is not None and tmpPat.contrastLaImages.ch3Images is not None and tmpPat.contrastLaImages.ch4Images is not None): if patho in self.contrastSApathologyDict: self.contrastSApathologyDict[patho] += 1 else: self.contrastSApathologyDict[patho] = 1 if patho in self.contrastCH2pathologyDict: self.contrastCH2pathologyDict[patho] += 1 else: self.contrastCH2pathologyDict[patho] = 1 if patho in self.contrastCH3pathologyDict: self.contrastCH3pathologyDict[patho] += 1 else: 
self.contrastCH3pathologyDict[patho] = 1 if patho in self.contrastCH4pathologyDict: self.contrastCH4pathologyDict[patho] += 1 else: self.contrastCH4pathologyDict[patho] = 1 if tmpPat.normalLaImages.ch2Images is not None: self.totalCH2ImagePatientNum += 1 if tmpPat.normalLaImages.ch3Images is not None: self.totalCH3ImagePatientNum += 1 if tmpPat.normalLaImages.ch4Images is not None: self.totalCH4ImagePatientNum += 1 for key in self.contrastSApathologyDict: self.curContrastSaImagePatientNum[key] = 0 for key in self.contrastCH2pathologyDict: self.curContrastCH2ImagePatientNum[key] = 0 for key in self.contrastCH3pathologyDict: self.curContrastCH3ImagePatientNum[key] = 0 for key in self.contrastCH4pathologyDict: self.curContrastCH4ImagePatientNum[key] = 0 def convertImage(self, image_2d): # if image_2d.min() > 254: # return None # Converting image from numpy array to PIL. pil_img = Image.fromarray(image_2d) if pil_img.getbbox() is None: return None return pil_img def createAutoEncoderImageFolderStructure(self, folderName): autoFolder = os.path.join(os.path.dirname(self.sourceFolder), folderName) autoTrainingFolder = os.path.join(autoFolder, "training") autoTestFolder = os.path.join(autoFolder, "test") os.makedirs(autoTrainingFolder) os.makedirs(autoTestFolder) return autoFolder, autoTrainingFolder, autoTestFolder def createClassificationImageFolderStructure(self, folderName): classFolder = os.path.join(os.path.dirname(self.sourceFolder), folderName) classTrainingFolder = os.path.join(classFolder, "training") classValidationFolder = os.path.join(classFolder, "validation") classTestFolder = os.path.join(classFolder, "test") classAllFolder = os.path.join(classFolder, 'all') os.makedirs(classTrainingFolder) os.makedirs(classValidationFolder) os.makedirs(classTestFolder) os.makedirs(classAllFolder) return classFolder, classTrainingFolder, classValidationFolder, classTestFolder, classAllFolder def saveImageForClassification(self, image, patientId, patho, testFolder, 
validationFolder, trainingFolder, axis, imPatho, curPatientNum, allFolder, pathologyDict): pil_img = self.convertImage(image[:, :]) if pil_img is not None: if (curPatientNum[patho] <= pathologyDict[patho] * 0.075 or (pathologyDict[patho] * 0.85 <= curPatientNum[patho] <= pathologyDict[patho] * 0.925)): imFolder = os.path.join(testFolder, imPatho) os.makedirs(imFolder, exist_ok=True) patientFolder = os.path.join(self.patientSeperatedTestFolder, imPatho + '_' + patientId) os.makedirs(patientFolder, exist_ok=True) elif ((pathologyDict[patho] * 0.075 <= curPatientNum[patho] <= pathologyDict[patho] * 0.15) or curPatientNum[patho] >= int(pathologyDict[patho] * 0.925)): imFolder = os.path.join(validationFolder, imPatho) os.makedirs(imFolder, exist_ok=True) patientFolder = os.path.join(self.patientSeperatedValidationFolder, imPatho + '_' + patientId) os.makedirs(patientFolder, exist_ok=True) else: imFolder = os.path.join(trainingFolder, imPatho) os.makedirs(imFolder, exist_ok=True) patientFolder = os.path.join(self.patientSeperatedTrainingFolder, imPatho + '_' + patientId) os.makedirs(patientFolder, exist_ok=True) axisFolder = os.path.join(patientFolder, axis) os.makedirs(axisFolder, exist_ok=True) pil_img.save(os.path.join(imFolder, "{}.png".format(patientId))) # pil_img.save(os.path.join(allFolder, "{}.png".format(patientId))) pil_img.save(os.path.join(axisFolder, "{}.png".format(patientId))) file = open(os.path.join(patientFolder, "pathology.txt"), "w") file.write("{}\n".format(patho)) file.close() def saveImageForAutoEncoder(self, images, patientId, testFolder, trainingFolder, curPatientNum, totalPatientNum, sliceIdx, frameIdx): if sliceIdx is not None: pil_img = self.convertImage(images[sliceIdx, frameIdx, :, :]) else: pil_img = self.convertImage(images[frameIdx, :, :]) if pil_img is not None: if (curPatientNum <= totalPatientNum * 0.1 or curPatientNum >= int(totalPatientNum * 0.9)): if sliceIdx is not None: pil_img.save(os.path.join(testFolder, 
"{}_{}_{}.png".format(patientId, sliceIdx, frameIdx))) else: pil_img.save(os.path.join(testFolder, "{}_{}.png".format(patientId, frameIdx))) else: if sliceIdx is not None: pil_img.save(os.path.join(trainingFolder, "{}_{}_{}.png".format(patientId, sliceIdx, frameIdx))) else: pil_img.save(os.path.join(trainingFolder, "{}_{}.png".format(patientId, frameIdx))) def createImageFolderDatasets(self): subfol = "only_abnormal" # autoSaFolder, autoSaTrainingFolder, autoSaTestFolder = self.createAutoEncoderImageFolderStructure( # "SaAutoEncoder") (contrastSaFolder, contrastSaTrainingFolder, contrastSaValidationFolder, contrastSaTestFolder, contrastSaAllFolder) = self.createClassificationImageFolderStructure( "{}/SaClassification".format(subfol)) # autoCH2Folder, autoCH2TrainingFolder, autoCH2TestFolder = self.createAutoEncoderImageFolderStructure( # "CH2AutoEncoder") (contrastCH2Folder, contrastCH2TrainingFolder, contrastCH2ValidationFolder, contrastCH2TestFolder, contrastCH2AllFolder) = self.createClassificationImageFolderStructure( "{}/CH2Classification".format(subfol)) # autoCH3Folder, autoCH3TrainingFolder, autoCH3TestFolder = self.createAutoEncoderImageFolderStructure( # "CH3AutoEncoder") (contrastCH3Folder, contrastCH3TrainingFolder, contrastCH3ValidationFolder, contrastCH3TestFolder, contrastCH3AllFolder) = self.createClassificationImageFolderStructure( "{}/CH3Classification".format(subfol)) # autoCH4Folder, autoCH4TrainingFolder, autoCH4TestFolder = self.createAutoEncoderImageFolderStructure( # "CH4AutoEncoder") (contrastCH4Folder, contrastCH4TrainingFolder, contrastCH4ValidationFolder, contrastCH4TestFolder, contrastCH4AllFolder) = self.createClassificationImageFolderStructure( "{}/CH4Classification".format(subfol)) self.patientSeperatedFolder = os.path.join(os.path.dirname(self.sourceFolder), '{}/patients'.format(subfol)) os.makedirs(self.patientSeperatedFolder) self.patientSeperatedTrainingFolder = os.path.join(self.patientSeperatedFolder, 'training') 
self.patientSeperatedValidationFolder = os.path.join(self.patientSeperatedFolder, 'validation') self.patientSeperatedTestFolder = os.path.join(self.patientSeperatedFolder, 'test') os.makedirs(self.patientSeperatedTrainingFolder) os.makedirs(self.patientSeperatedValidationFolder) os.makedirs(self.patientSeperatedTestFolder) for file in os.listdir(self.sourceFolder): if ".p" in file: tmpPat = pickle.load(open(os.path.join(self.sourceFolder, file), 'rb')) patho = tmpPat.pathology.strip() if "U18" in patho or "sport" in patho or "Normal" in patho: continue # elif "sport" in patho: # patho = "Sport" # elif "Normal" not in patho and "HCM" not in patho: # patho = "Other" imPatho = patho # if "sport" in patho: # imPatho = "Sport" # if "Normal" not in patho: # imPatho = "Hypertrophic" classificationReady = False if (tmpPat.contrastSaImages is not None and tmpPat.contrastLaImages.ch2Images is not None and tmpPat.contrastLaImages.ch3Images is not None and tmpPat.contrastLaImages.ch4Images is not None): classificationReady = True # if tmpPat.normalSaImages is not None: # for i in range(tmpPat.normalSaImages.shape[0]): # for j in range(tmpPat.normalSaImages.shape[1]): # self.saveImageForAutoEncoder(tmpPat.normalSaImages, tmpPat.patientID, autoSaTestFolder, # autoSaTrainingFolder, self.curSaImagePatientNum, # self.totalSaImagePatientNum, i, j) # self.curSaImagePatientNum += 1 if classificationReady: self.saveImageForClassification(tmpPat.contrastSaImages, tmpPat.patientID, patho, contrastSaTestFolder, contrastSaValidationFolder, contrastSaTrainingFolder, 'SA', imPatho, self.curContrastSaImagePatientNum, contrastSaAllFolder, self.contrastSApathologyDict) self.curContrastSaImagePatientNum[patho] += 1 # if tmpPat.normalLaImages.ch2Images is not None: # for i in range(tmpPat.normalLaImages.ch2Images.shape[0]): # self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch2Images, tmpPat.patientID, # autoCH2TestFolder, # autoCH2TrainingFolder, self.curCH2ImagePatientNum, # 
self.totalCH2ImagePatientNum, None, i) # self.curCH2ImagePatientNum += 1 if classificationReady: self.saveImageForClassification(tmpPat.contrastLaImages.ch2Images, tmpPat.patientID, patho, contrastCH2TestFolder, contrastCH2ValidationFolder, contrastCH2TrainingFolder, 'CH2', imPatho, self.curContrastCH2ImagePatientNum, contrastCH2AllFolder, self.contrastCH2pathologyDict) self.curContrastCH2ImagePatientNum[patho] += 1 # if tmpPat.normalLaImages.ch3Images is not None: # for i in range(tmpPat.normalLaImages.ch3Images.shape[0]): # self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch3Images, tmpPat.patientID, # autoCH3TestFolder, # autoCH3TrainingFolder, self.curCH3ImagePatientNum, # self.totalCH3ImagePatientNum, None, i) # self.curCH3ImagePatientNum += 1 if classificationReady: self.saveImageForClassification(tmpPat.contrastLaImages.ch3Images, tmpPat.patientID, patho, contrastCH3TestFolder, contrastCH3ValidationFolder, contrastCH3TrainingFolder, 'CH3', imPatho, self.curContrastCH3ImagePatientNum, contrastCH3AllFolder, self.contrastCH3pathologyDict) self.curContrastCH3ImagePatientNum[patho] += 1 # if tmpPat.normalLaImages.ch4Images is not None: # for i in range(tmpPat.normalLaImages.ch4Images.shape[0]): # self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch4Images, tmpPat.patientID, # autoCH4TestFolder, # autoCH4TrainingFolder, self.curCH4ImagePatientNum, # self.totalCH4ImagePatientNum, None, i) # self.curCH4ImagePatientNum += 1 if classificationReady: self.saveImageForClassification(tmpPat.contrastLaImages.ch4Images, tmpPat.patientID, patho, contrastCH4TestFolder, contrastCH4ValidationFolder, contrastCH4TrainingFolder, 'CH4', imPatho, self.curContrastCH4ImagePatientNum, contrastCH4AllFolder, self.contrastCH4pathologyDict) self.curContrastCH4ImagePatientNum[patho] += 1 self.createLabelFileFromPathoDict(contrastSaFolder, self.contrastSApathologyDict) self.createLabelFileFromPathoDict(contrastCH2Folder, self.contrastCH2pathologyDict) 
self.createLabelFileFromPathoDict(contrastCH3Folder, self.contrastCH3pathologyDict) self.createLabelFileFromPathoDict(contrastCH4Folder, self.contrastCH4pathologyDict) def createLabelFileFromPathoDict(self, destination, pathoDict): file = open(os.path.join(destination, "pathologies.txt"), "w") for key in pathoDict: file.write("{}\n".format(key)) file.close() if __name__ == "__main__": sourceFolder = 'D:/BME/7felev/Szakdolgozat/whole_dataset/filtered_data' imageFolderArranger = PatientToImageFolder(sourceFolder) imageFolderArranger.createImageFolderDatasets()
import os import pickle from PIL import Image class PatientToImageFolder: def __init__(self, sourceFolder): self.sourceFolder = sourceFolder # How many patient with contrast SA for each pathology (used for classification) self.contrastSApathologyDict = {} # How many patient with contrast LA for each pathology (used for classification) self.contrastCH2pathologyDict = {} self.contrastCH3pathologyDict = {} self.contrastCH4pathologyDict = {} # How many patient with SA image (used for autoencoder training) self.totalSaImagePatientNum = 0 self.curSaImagePatientNum = 0 # How many patient with LA image (used for autoencoder training) self.totalCH2ImagePatientNum = 0 self.curCH2ImagePatientNum = 0 self.totalCH3ImagePatientNum = 0 self.curCH3ImagePatientNum = 0 self.totalCH4ImagePatientNum = 0 self.curCH4ImagePatientNum = 0 self.curContrastSaImagePatientNum = {} self.curContrastCH2ImagePatientNum = {} self.curContrastCH3ImagePatientNum = {} self.curContrastCH4ImagePatientNum = {} self.collectInfo() def collectInfo(self): for file in os.listdir(self.sourceFolder): if ".p" in file: tmpPat = pickle.load(open(os.path.join(self.sourceFolder, file), 'rb')) patho = tmpPat.pathology.strip() if "U18" in patho or "sport" in patho or "Normal" in patho: continue # elif "sport" in patho: # patho = "Sport" # elif "Normal" not in patho and "HCM" not in patho: # patho = "Other" if tmpPat.normalSaImages is not None: self.totalSaImagePatientNum += 1 if (tmpPat.contrastSaImages is not None and tmpPat.contrastLaImages.ch2Images is not None and tmpPat.contrastLaImages.ch3Images is not None and tmpPat.contrastLaImages.ch4Images is not None): if patho in self.contrastSApathologyDict: self.contrastSApathologyDict[patho] += 1 else: self.contrastSApathologyDict[patho] = 1 if patho in self.contrastCH2pathologyDict: self.contrastCH2pathologyDict[patho] += 1 else: self.contrastCH2pathologyDict[patho] = 1 if patho in self.contrastCH3pathologyDict: self.contrastCH3pathologyDict[patho] += 1 else: 
self.contrastCH3pathologyDict[patho] = 1 if patho in self.contrastCH4pathologyDict: self.contrastCH4pathologyDict[patho] += 1 else: self.contrastCH4pathologyDict[patho] = 1 if tmpPat.normalLaImages.ch2Images is not None: self.totalCH2ImagePatientNum += 1 if tmpPat.normalLaImages.ch3Images is not None: self.totalCH3ImagePatientNum += 1 if tmpPat.normalLaImages.ch4Images is not None: self.totalCH4ImagePatientNum += 1 for key in self.contrastSApathologyDict: self.curContrastSaImagePatientNum[key] = 0 for key in self.contrastCH2pathologyDict: self.curContrastCH2ImagePatientNum[key] = 0 for key in self.contrastCH3pathologyDict: self.curContrastCH3ImagePatientNum[key] = 0 for key in self.contrastCH4pathologyDict: self.curContrastCH4ImagePatientNum[key] = 0 def convertImage(self, image_2d): # if image_2d.min() > 254: # return None # Converting image from numpy array to PIL. pil_img = Image.fromarray(image_2d) if pil_img.getbbox() is None: return None return pil_img def createAutoEncoderImageFolderStructure(self, folderName): autoFolder = os.path.join(os.path.dirname(self.sourceFolder), folderName) autoTrainingFolder = os.path.join(autoFolder, "training") autoTestFolder = os.path.join(autoFolder, "test") os.makedirs(autoTrainingFolder) os.makedirs(autoTestFolder) return autoFolder, autoTrainingFolder, autoTestFolder def createClassificationImageFolderStructure(self, folderName): classFolder = os.path.join(os.path.dirname(self.sourceFolder), folderName) classTrainingFolder = os.path.join(classFolder, "training") classValidationFolder = os.path.join(classFolder, "validation") classTestFolder = os.path.join(classFolder, "test") classAllFolder = os.path.join(classFolder, 'all') os.makedirs(classTrainingFolder) os.makedirs(classValidationFolder) os.makedirs(classTestFolder) os.makedirs(classAllFolder) return classFolder, classTrainingFolder, classValidationFolder, classTestFolder, classAllFolder def saveImageForClassification(self, image, patientId, patho, testFolder, 
validationFolder, trainingFolder, axis, imPatho, curPatientNum, allFolder, pathologyDict): pil_img = self.convertImage(image[:, :]) if pil_img is not None: if (curPatientNum[patho] <= pathologyDict[patho] * 0.075 or (pathologyDict[patho] * 0.85 <= curPatientNum[patho] <= pathologyDict[patho] * 0.925)): imFolder = os.path.join(testFolder, imPatho) os.makedirs(imFolder, exist_ok=True) patientFolder = os.path.join(self.patientSeperatedTestFolder, imPatho + '_' + patientId) os.makedirs(patientFolder, exist_ok=True) elif ((pathologyDict[patho] * 0.075 <= curPatientNum[patho] <= pathologyDict[patho] * 0.15) or curPatientNum[patho] >= int(pathologyDict[patho] * 0.925)): imFolder = os.path.join(validationFolder, imPatho) os.makedirs(imFolder, exist_ok=True) patientFolder = os.path.join(self.patientSeperatedValidationFolder, imPatho + '_' + patientId) os.makedirs(patientFolder, exist_ok=True) else: imFolder = os.path.join(trainingFolder, imPatho) os.makedirs(imFolder, exist_ok=True) patientFolder = os.path.join(self.patientSeperatedTrainingFolder, imPatho + '_' + patientId) os.makedirs(patientFolder, exist_ok=True) axisFolder = os.path.join(patientFolder, axis) os.makedirs(axisFolder, exist_ok=True) pil_img.save(os.path.join(imFolder, "{}.png".format(patientId))) # pil_img.save(os.path.join(allFolder, "{}.png".format(patientId))) pil_img.save(os.path.join(axisFolder, "{}.png".format(patientId))) file = open(os.path.join(patientFolder, "pathology.txt"), "w") file.write("{}\n".format(patho)) file.close() def saveImageForAutoEncoder(self, images, patientId, testFolder, trainingFolder, curPatientNum, totalPatientNum, sliceIdx, frameIdx): if sliceIdx is not None: pil_img = self.convertImage(images[sliceIdx, frameIdx, :, :]) else: pil_img = self.convertImage(images[frameIdx, :, :]) if pil_img is not None: if (curPatientNum <= totalPatientNum * 0.1 or curPatientNum >= int(totalPatientNum * 0.9)): if sliceIdx is not None: pil_img.save(os.path.join(testFolder, 
"{}_{}_{}.png".format(patientId, sliceIdx, frameIdx))) else: pil_img.save(os.path.join(testFolder, "{}_{}.png".format(patientId, frameIdx))) else: if sliceIdx is not None: pil_img.save(os.path.join(trainingFolder, "{}_{}_{}.png".format(patientId, sliceIdx, frameIdx))) else: pil_img.save(os.path.join(trainingFolder, "{}_{}.png".format(patientId, frameIdx))) def createImageFolderDatasets(self): subfol = "only_abnormal" # autoSaFolder, autoSaTrainingFolder, autoSaTestFolder = self.createAutoEncoderImageFolderStructure( # "SaAutoEncoder") (contrastSaFolder, contrastSaTrainingFolder, contrastSaValidationFolder, contrastSaTestFolder, contrastSaAllFolder) = self.createClassificationImageFolderStructure( "{}/SaClassification".format(subfol)) # autoCH2Folder, autoCH2TrainingFolder, autoCH2TestFolder = self.createAutoEncoderImageFolderStructure( # "CH2AutoEncoder") (contrastCH2Folder, contrastCH2TrainingFolder, contrastCH2ValidationFolder, contrastCH2TestFolder, contrastCH2AllFolder) = self.createClassificationImageFolderStructure( "{}/CH2Classification".format(subfol)) # autoCH3Folder, autoCH3TrainingFolder, autoCH3TestFolder = self.createAutoEncoderImageFolderStructure( # "CH3AutoEncoder") (contrastCH3Folder, contrastCH3TrainingFolder, contrastCH3ValidationFolder, contrastCH3TestFolder, contrastCH3AllFolder) = self.createClassificationImageFolderStructure( "{}/CH3Classification".format(subfol)) # autoCH4Folder, autoCH4TrainingFolder, autoCH4TestFolder = self.createAutoEncoderImageFolderStructure( # "CH4AutoEncoder") (contrastCH4Folder, contrastCH4TrainingFolder, contrastCH4ValidationFolder, contrastCH4TestFolder, contrastCH4AllFolder) = self.createClassificationImageFolderStructure( "{}/CH4Classification".format(subfol)) self.patientSeperatedFolder = os.path.join(os.path.dirname(self.sourceFolder), '{}/patients'.format(subfol)) os.makedirs(self.patientSeperatedFolder) self.patientSeperatedTrainingFolder = os.path.join(self.patientSeperatedFolder, 'training') 
self.patientSeperatedValidationFolder = os.path.join(self.patientSeperatedFolder, 'validation') self.patientSeperatedTestFolder = os.path.join(self.patientSeperatedFolder, 'test') os.makedirs(self.patientSeperatedTrainingFolder) os.makedirs(self.patientSeperatedValidationFolder) os.makedirs(self.patientSeperatedTestFolder) for file in os.listdir(self.sourceFolder): if ".p" in file: tmpPat = pickle.load(open(os.path.join(self.sourceFolder, file), 'rb')) patho = tmpPat.pathology.strip() if "U18" in patho or "sport" in patho or "Normal" in patho: continue # elif "sport" in patho: # patho = "Sport" # elif "Normal" not in patho and "HCM" not in patho: # patho = "Other" imPatho = patho # if "sport" in patho: # imPatho = "Sport" # if "Normal" not in patho: # imPatho = "Hypertrophic" classificationReady = False if (tmpPat.contrastSaImages is not None and tmpPat.contrastLaImages.ch2Images is not None and tmpPat.contrastLaImages.ch3Images is not None and tmpPat.contrastLaImages.ch4Images is not None): classificationReady = True # if tmpPat.normalSaImages is not None: # for i in range(tmpPat.normalSaImages.shape[0]): # for j in range(tmpPat.normalSaImages.shape[1]): # self.saveImageForAutoEncoder(tmpPat.normalSaImages, tmpPat.patientID, autoSaTestFolder, # autoSaTrainingFolder, self.curSaImagePatientNum, # self.totalSaImagePatientNum, i, j) # self.curSaImagePatientNum += 1 if classificationReady: self.saveImageForClassification(tmpPat.contrastSaImages, tmpPat.patientID, patho, contrastSaTestFolder, contrastSaValidationFolder, contrastSaTrainingFolder, 'SA', imPatho, self.curContrastSaImagePatientNum, contrastSaAllFolder, self.contrastSApathologyDict) self.curContrastSaImagePatientNum[patho] += 1 # if tmpPat.normalLaImages.ch2Images is not None: # for i in range(tmpPat.normalLaImages.ch2Images.shape[0]): # self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch2Images, tmpPat.patientID, # autoCH2TestFolder, # autoCH2TrainingFolder, self.curCH2ImagePatientNum, # 
self.totalCH2ImagePatientNum, None, i) # self.curCH2ImagePatientNum += 1 if classificationReady: self.saveImageForClassification(tmpPat.contrastLaImages.ch2Images, tmpPat.patientID, patho, contrastCH2TestFolder, contrastCH2ValidationFolder, contrastCH2TrainingFolder, 'CH2', imPatho, self.curContrastCH2ImagePatientNum, contrastCH2AllFolder, self.contrastCH2pathologyDict) self.curContrastCH2ImagePatientNum[patho] += 1 # if tmpPat.normalLaImages.ch3Images is not None: # for i in range(tmpPat.normalLaImages.ch3Images.shape[0]): # self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch3Images, tmpPat.patientID, # autoCH3TestFolder, # autoCH3TrainingFolder, self.curCH3ImagePatientNum, # self.totalCH3ImagePatientNum, None, i) # self.curCH3ImagePatientNum += 1 if classificationReady: self.saveImageForClassification(tmpPat.contrastLaImages.ch3Images, tmpPat.patientID, patho, contrastCH3TestFolder, contrastCH3ValidationFolder, contrastCH3TrainingFolder, 'CH3', imPatho, self.curContrastCH3ImagePatientNum, contrastCH3AllFolder, self.contrastCH3pathologyDict) self.curContrastCH3ImagePatientNum[patho] += 1 # if tmpPat.normalLaImages.ch4Images is not None: # for i in range(tmpPat.normalLaImages.ch4Images.shape[0]): # self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch4Images, tmpPat.patientID, # autoCH4TestFolder, # autoCH4TrainingFolder, self.curCH4ImagePatientNum, # self.totalCH4ImagePatientNum, None, i) # self.curCH4ImagePatientNum += 1 if classificationReady: self.saveImageForClassification(tmpPat.contrastLaImages.ch4Images, tmpPat.patientID, patho, contrastCH4TestFolder, contrastCH4ValidationFolder, contrastCH4TrainingFolder, 'CH4', imPatho, self.curContrastCH4ImagePatientNum, contrastCH4AllFolder, self.contrastCH4pathologyDict) self.curContrastCH4ImagePatientNum[patho] += 1 self.createLabelFileFromPathoDict(contrastSaFolder, self.contrastSApathologyDict) self.createLabelFileFromPathoDict(contrastCH2Folder, self.contrastCH2pathologyDict) 
self.createLabelFileFromPathoDict(contrastCH3Folder, self.contrastCH3pathologyDict) self.createLabelFileFromPathoDict(contrastCH4Folder, self.contrastCH4pathologyDict) def createLabelFileFromPathoDict(self, destination, pathoDict): file = open(os.path.join(destination, "pathologies.txt"), "w") for key in pathoDict: file.write("{}\n".format(key)) file.close() if __name__ == "__main__": sourceFolder = 'D:/BME/7felev/Szakdolgozat/whole_dataset/filtered_data' imageFolderArranger = PatientToImageFolder(sourceFolder) imageFolderArranger.createImageFolderDatasets()
en
0.343182
# How many patient with contrast SA for each pathology (used for classification) # How many patient with contrast LA for each pathology (used for classification) # How many patient with SA image (used for autoencoder training) # How many patient with LA image (used for autoencoder training) # elif "sport" in patho: # patho = "Sport" # elif "Normal" not in patho and "HCM" not in patho: # patho = "Other" # if image_2d.min() > 254: # return None # Converting image from numpy array to PIL. # pil_img.save(os.path.join(allFolder, "{}.png".format(patientId))) # autoSaFolder, autoSaTrainingFolder, autoSaTestFolder = self.createAutoEncoderImageFolderStructure( # "SaAutoEncoder") # autoCH2Folder, autoCH2TrainingFolder, autoCH2TestFolder = self.createAutoEncoderImageFolderStructure( # "CH2AutoEncoder") # autoCH3Folder, autoCH3TrainingFolder, autoCH3TestFolder = self.createAutoEncoderImageFolderStructure( # "CH3AutoEncoder") # autoCH4Folder, autoCH4TrainingFolder, autoCH4TestFolder = self.createAutoEncoderImageFolderStructure( # "CH4AutoEncoder") # elif "sport" in patho: # patho = "Sport" # elif "Normal" not in patho and "HCM" not in patho: # patho = "Other" # if "sport" in patho: # imPatho = "Sport" # if "Normal" not in patho: # imPatho = "Hypertrophic" # if tmpPat.normalSaImages is not None: # for i in range(tmpPat.normalSaImages.shape[0]): # for j in range(tmpPat.normalSaImages.shape[1]): # self.saveImageForAutoEncoder(tmpPat.normalSaImages, tmpPat.patientID, autoSaTestFolder, # autoSaTrainingFolder, self.curSaImagePatientNum, # self.totalSaImagePatientNum, i, j) # self.curSaImagePatientNum += 1 # if tmpPat.normalLaImages.ch2Images is not None: # for i in range(tmpPat.normalLaImages.ch2Images.shape[0]): # self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch2Images, tmpPat.patientID, # autoCH2TestFolder, # autoCH2TrainingFolder, self.curCH2ImagePatientNum, # self.totalCH2ImagePatientNum, None, i) # self.curCH2ImagePatientNum += 1 # if tmpPat.normalLaImages.ch3Images is not 
None: # for i in range(tmpPat.normalLaImages.ch3Images.shape[0]): # self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch3Images, tmpPat.patientID, # autoCH3TestFolder, # autoCH3TrainingFolder, self.curCH3ImagePatientNum, # self.totalCH3ImagePatientNum, None, i) # self.curCH3ImagePatientNum += 1 # if tmpPat.normalLaImages.ch4Images is not None: # for i in range(tmpPat.normalLaImages.ch4Images.shape[0]): # self.saveImageForAutoEncoder(tmpPat.normalLaImages.ch4Images, tmpPat.patientID, # autoCH4TestFolder, # autoCH4TrainingFolder, self.curCH4ImagePatientNum, # self.totalCH4ImagePatientNum, None, i) # self.curCH4ImagePatientNum += 1
2.685809
3
scripts/pipeline/a06a_submission.py
Iolaum/Phi1337
0
9833
import pandas as pd import numpy as np import pickle from sklearn.cross_validation import train_test_split from sklearn.linear_model import LinearRegression from sklearn.metrics import mean_squared_error from math import sqrt from sklearn.svm import SVR from sklearn.svm import LinearSVR from sklearn.preprocessing import StandardScaler from sklearn.ensemble import RandomForestRegressor def prune(x): if x < 1: return 1 elif x > 3: return 3 else: return x def regression(reg_type, standardize_df, debug=False): # load model filename = '../../dataset/model_' + reg_type + '.pickle' lin_model = None with open(filename, 'rb') as f: lin_model = pickle.load(f) score_df_tst = pd.read_pickle('../../dataset/score_df_final_tst.pickle') # Fill NaN value # score_df = score_df.fillna(0.0) # The last column is the target X = np.array(score_df_tst) if standardize_df: print("Standardizing...") with open("../../dataset/scaler.pickle", 'rb') as handle: scaler = pickle.load(handle) X = scaler.transform(X) # Debug if debug: print("Score DataFrame") print(score_df) print("") print("Training Values") print(X) print("") print("Output Values") print(Y) print("") print("Shapes of X and Y") print(X.shape) print(Y.shape) # Debug if debug: print("XTR - XTS") print(xtr.shape) print(xts.shape) print("") print("YTR - YTS") print(ytr.shape) print(yts.shape) print("") yts_pred = lin_model.predict(X) #yts_error = sqrt(mean_squared_error(yts_pred, yts)) print("Prediction by (" + reg_type + ") on Test data have finished") # create submission file id_series = pd.read_csv('../../dataset/test.csv')['id'] submission_df = pd.DataFrame(id_series, columns=['id']) submission_df['relevance'] = yts_pred submission_df['relevance'] = submission_df['relevance'].map(lambda x: prune(x)) submission_df.to_csv('../../dataset/submission.csv', columns=['id', 'relevance'], index=False) if __name__ == "__main__": # Change between: # svr # linear # rfr regression_type = 'svr' standardize_df = True regression(regression_type, 
standardize_df, debug=False)
import pandas as pd import numpy as np import pickle from sklearn.cross_validation import train_test_split from sklearn.linear_model import LinearRegression from sklearn.metrics import mean_squared_error from math import sqrt from sklearn.svm import SVR from sklearn.svm import LinearSVR from sklearn.preprocessing import StandardScaler from sklearn.ensemble import RandomForestRegressor def prune(x): if x < 1: return 1 elif x > 3: return 3 else: return x def regression(reg_type, standardize_df, debug=False): # load model filename = '../../dataset/model_' + reg_type + '.pickle' lin_model = None with open(filename, 'rb') as f: lin_model = pickle.load(f) score_df_tst = pd.read_pickle('../../dataset/score_df_final_tst.pickle') # Fill NaN value # score_df = score_df.fillna(0.0) # The last column is the target X = np.array(score_df_tst) if standardize_df: print("Standardizing...") with open("../../dataset/scaler.pickle", 'rb') as handle: scaler = pickle.load(handle) X = scaler.transform(X) # Debug if debug: print("Score DataFrame") print(score_df) print("") print("Training Values") print(X) print("") print("Output Values") print(Y) print("") print("Shapes of X and Y") print(X.shape) print(Y.shape) # Debug if debug: print("XTR - XTS") print(xtr.shape) print(xts.shape) print("") print("YTR - YTS") print(ytr.shape) print(yts.shape) print("") yts_pred = lin_model.predict(X) #yts_error = sqrt(mean_squared_error(yts_pred, yts)) print("Prediction by (" + reg_type + ") on Test data have finished") # create submission file id_series = pd.read_csv('../../dataset/test.csv')['id'] submission_df = pd.DataFrame(id_series, columns=['id']) submission_df['relevance'] = yts_pred submission_df['relevance'] = submission_df['relevance'].map(lambda x: prune(x)) submission_df.to_csv('../../dataset/submission.csv', columns=['id', 'relevance'], index=False) if __name__ == "__main__": # Change between: # svr # linear # rfr regression_type = 'svr' standardize_df = True regression(regression_type, 
standardize_df, debug=False)
en
0.494724
# load model # Fill NaN value # score_df = score_df.fillna(0.0) # The last column is the target # Debug # Debug #yts_error = sqrt(mean_squared_error(yts_pred, yts)) # create submission file # Change between: # svr # linear # rfr
2.667833
3
Data Gathering/PythonPlottingScript.py
Carter-eng/SeniorDesign
0
9834
import numpy as np import serial import time import matplotlib.pyplot as plt def getData(): ser = serial.Serial('/dev/ttyACM7', 9600) sensorReadings = [] start = time.time() current = time.time() while current - start < 10: data =ser.readline() sensorReadings.append(float(data)) current = time.time() return sensorReadings def plotter(sensorReadings): plt.plot(sensorReadings) plt.ylabel('EEG Sensor sensorReadings') plt.show() if __name__ == '__main__': sensorReadings = getData() plotter(sensorReadings)
import numpy as np import serial import time import matplotlib.pyplot as plt def getData(): ser = serial.Serial('/dev/ttyACM7', 9600) sensorReadings = [] start = time.time() current = time.time() while current - start < 10: data =ser.readline() sensorReadings.append(float(data)) current = time.time() return sensorReadings def plotter(sensorReadings): plt.plot(sensorReadings) plt.ylabel('EEG Sensor sensorReadings') plt.show() if __name__ == '__main__': sensorReadings = getData() plotter(sensorReadings)
none
1
2.78892
3
Examples/Rich_Message_Example.py
robinvoogt/text-sdk-python
2
9835
from CMText.TextClient import TextClient # Message to be send message = 'Examples message to be send' # Media to be send media = { "mediaName": "conversational-commerce", "mediaUri": "https://www.cm.com/cdn/cm/cm.png", "mimeType": "image/png" } # AllowedChannels in this case Whatsapp allowedChannels = ['Whatsapp'] # Recipients to = ['003156789000', '002134567890'] # Instantiate client with your own api-key client = TextClient(apikey=UNIQUE_API_KEY) # Add a Rich message to the queue client.AddRichMessage(message=message, from_='pythonSDK', to=to, allowedChannels=allowedChannels, media=media) # Send the messages response = client.send() # Print response print(response.text)
from CMText.TextClient import TextClient # Message to be send message = 'Examples message to be send' # Media to be send media = { "mediaName": "conversational-commerce", "mediaUri": "https://www.cm.com/cdn/cm/cm.png", "mimeType": "image/png" } # AllowedChannels in this case Whatsapp allowedChannels = ['Whatsapp'] # Recipients to = ['003156789000', '002134567890'] # Instantiate client with your own api-key client = TextClient(apikey=UNIQUE_API_KEY) # Add a Rich message to the queue client.AddRichMessage(message=message, from_='pythonSDK', to=to, allowedChannels=allowedChannels, media=media) # Send the messages response = client.send() # Print response print(response.text)
en
0.768204
# Message to be send # Media to be send # AllowedChannels in this case Whatsapp # Recipients # Instantiate client with your own api-key # Add a Rich message to the queue # Send the messages # Print response
2.582542
3
client/audio.py
Dmitry450/asynciogame
0
9836
<filename>client/audio.py import pygame from pygame.math import Vector2 class Sound: def __init__(self, manager, snd, volume=1.0): self.manager = manager self.snd = pygame.mixer.Sound(snd) self.snd.set_volume(1.0) self.ttl = snd.get_length() self.playing = True self.snd.play() def update(self, dtime): self.ttl -= dtime if self.ttl <= 0: self.playing = False class AttachedSound(Sound): def __init__(self, manager, snd, position, volume=1.0, fade_dist=1, min_volume=0.1): super().__init__(manager, snd) if not isinstance(position, Vector2): position = Vector2(position) self.position = position self.volume = volume self.fade_dist = fade_dist self.min_volume = min_volume def update(self, dtime): super().update(dtime) if self.playing and self.manager.track_object is not None: dist = self.position.distance_to(self.manager.track_object.position) volume = self.volume*self.fade_dist/dist if volume > self.min_volume: self.snd.set_volume(volume) else: self.snd.set_volume(0) class AudioManager: def __init__(self): self.loaded = {} self.sounds = [] self.track_object = None def play_sound(self, d): name = d["name"] if self.loaded.get(name) is None: self.loaded[name] = pygame.mixer.Sound(name) if d["type"] == "normal": self.sounds.append(Sound(self, self.loaded[name], volume=d.get("volume", 1.0))) # Actually sound can be "attached_to_position" and "attached_to_entity". # To avoid adding EntityManager reference into AudioManager, "position" # will be replaced by entity.position in Connection when sound event handled. # Anyway, d["type"] will be set to "attached" elif d["type"] == "attached": self.sounds.append(AttachedSound(self, self.loaded[name], d["position"], volume=d.get("volume", 1.0), fade_dist=d.get("fade_dist", 1), min_volume=d.get("min_volume", 0.1))) def update(self, dtime): for sound in self.sounds: sound.update(dtime) if not sound.playing: self.sounds.remove(sound)
<filename>client/audio.py import pygame from pygame.math import Vector2 class Sound: def __init__(self, manager, snd, volume=1.0): self.manager = manager self.snd = pygame.mixer.Sound(snd) self.snd.set_volume(1.0) self.ttl = snd.get_length() self.playing = True self.snd.play() def update(self, dtime): self.ttl -= dtime if self.ttl <= 0: self.playing = False class AttachedSound(Sound): def __init__(self, manager, snd, position, volume=1.0, fade_dist=1, min_volume=0.1): super().__init__(manager, snd) if not isinstance(position, Vector2): position = Vector2(position) self.position = position self.volume = volume self.fade_dist = fade_dist self.min_volume = min_volume def update(self, dtime): super().update(dtime) if self.playing and self.manager.track_object is not None: dist = self.position.distance_to(self.manager.track_object.position) volume = self.volume*self.fade_dist/dist if volume > self.min_volume: self.snd.set_volume(volume) else: self.snd.set_volume(0) class AudioManager: def __init__(self): self.loaded = {} self.sounds = [] self.track_object = None def play_sound(self, d): name = d["name"] if self.loaded.get(name) is None: self.loaded[name] = pygame.mixer.Sound(name) if d["type"] == "normal": self.sounds.append(Sound(self, self.loaded[name], volume=d.get("volume", 1.0))) # Actually sound can be "attached_to_position" and "attached_to_entity". # To avoid adding EntityManager reference into AudioManager, "position" # will be replaced by entity.position in Connection when sound event handled. # Anyway, d["type"] will be set to "attached" elif d["type"] == "attached": self.sounds.append(AttachedSound(self, self.loaded[name], d["position"], volume=d.get("volume", 1.0), fade_dist=d.get("fade_dist", 1), min_volume=d.get("min_volume", 0.1))) def update(self, dtime): for sound in self.sounds: sound.update(dtime) if not sound.playing: self.sounds.remove(sound)
en
0.738524
# Actually sound can be "attached_to_position" and "attached_to_entity". # To avoid adding EntityManager reference into AudioManager, "position" # will be replaced by entity.position in Connection when sound event handled. # Anyway, d["type"] will be set to "attached"
3.056284
3
bot/ganjoor/category_choose.py
MmeK/ganjoor-telegram-bot
0
9837
# Copyright 2021 <NAME> <<EMAIL>>. # SPDX-License-Identifier: MIT # Telegram API framework core imports from collections import namedtuple from functools import partial from ganjoor.ganjoor import Ganjoor from telegram.ext import Dispatcher, CallbackContext from telegram import Update # Helper methods import from utils.logger import get_logger from utils.telegram.keyboards import category_keyboard # Telegram API framework handlers imports from telegram.ext import CallbackQueryHandler # Init logger logger = get_logger(__name__) CallbackData = namedtuple('CallbackData', "menu_name doto") def init(dispatcher: Dispatcher, ganjoor: Ganjoor): """Provide handlers initialization.""" dispatcher.add_handler(CallbackQueryHandler( partial(category_id, ganjoor=ganjoor), pattern=r'^category_*')) def category_id(update: Update, context: CallbackContext, ganjoor: Ganjoor) -> int: """Process a /start command.""" query = update.callback_query message_id = '_'.join(query.data.split('_')[2:]) cat_id = query.data.split('_')[1] cat = ganjoor.find_category_by_id(cat_id, with_poems=True) # query.answer() query.answer() context.bot.edit_message_reply_markup( inline_message_id=message_id, reply_markup=category_keyboard(cat, message_id)) # query.edit_reply_markup()
# Copyright 2021 <NAME> <<EMAIL>>. # SPDX-License-Identifier: MIT # Telegram API framework core imports from collections import namedtuple from functools import partial from ganjoor.ganjoor import Ganjoor from telegram.ext import Dispatcher, CallbackContext from telegram import Update # Helper methods import from utils.logger import get_logger from utils.telegram.keyboards import category_keyboard # Telegram API framework handlers imports from telegram.ext import CallbackQueryHandler # Init logger logger = get_logger(__name__) CallbackData = namedtuple('CallbackData', "menu_name doto") def init(dispatcher: Dispatcher, ganjoor: Ganjoor): """Provide handlers initialization.""" dispatcher.add_handler(CallbackQueryHandler( partial(category_id, ganjoor=ganjoor), pattern=r'^category_*')) def category_id(update: Update, context: CallbackContext, ganjoor: Ganjoor) -> int: """Process a /start command.""" query = update.callback_query message_id = '_'.join(query.data.split('_')[2:]) cat_id = query.data.split('_')[1] cat = ganjoor.find_category_by_id(cat_id, with_poems=True) # query.answer() query.answer() context.bot.edit_message_reply_markup( inline_message_id=message_id, reply_markup=category_keyboard(cat, message_id)) # query.edit_reply_markup()
en
0.32484
# Copyright 2021 <NAME> <<EMAIL>>. # SPDX-License-Identifier: MIT # Telegram API framework core imports # Helper methods import # Telegram API framework handlers imports # Init logger Provide handlers initialization. Process a /start command. # query.answer() # query.edit_reply_markup()
2.130924
2
python/util/md_utils.py
walterfan/snippets
1
9838
<filename>python/util/md_utils.py<gh_stars>1-10 import os import sys import struct import re import logging logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) logger = logging.getLogger(__name__) def list_to_md(str_list): output = "" for str in str_list: output = output + "* %s \n" % str return output def str_to_md_list(the_str, sep): str_list = the_str.split(sep) output = "" for str in str_list: output = output + "* %s \n" % str return output
<filename>python/util/md_utils.py<gh_stars>1-10 import os import sys import struct import re import logging logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) logger = logging.getLogger(__name__) def list_to_md(str_list): output = "" for str in str_list: output = output + "* %s \n" % str return output def str_to_md_list(the_str, sep): str_list = the_str.split(sep) output = "" for str in str_list: output = output + "* %s \n" % str return output
none
1
2.949492
3
board/main.py
Josverl/micropython-stubber
96
9839
import uos as os import time def countdown(): for i in range(5, 0, -1): print("start stubbing in {}...".format(i)) time.sleep(1) import createstubs # import stub_lvgl try: # only run import if no stubs yet os.listdir("stubs") print("stub folder was found, stubbing is not automatically started") except OSError: countdown()
import uos as os import time def countdown(): for i in range(5, 0, -1): print("start stubbing in {}...".format(i)) time.sleep(1) import createstubs # import stub_lvgl try: # only run import if no stubs yet os.listdir("stubs") print("stub folder was found, stubbing is not automatically started") except OSError: countdown()
en
0.476331
# import stub_lvgl # only run import if no stubs yet
2.329535
2
third_party/blink/tools/blinkpy/web_tests/breakpad/dump_reader_multipart_unittest.py
zipated/src
2,151
9840
# Copyright (C) 2013 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import unittest from blinkpy.common.host import Host from blinkpy.common.host_mock import MockHost from blinkpy.web_tests.breakpad.dump_reader_multipart import DumpReaderMultipart class TestDumpReaderMultipart(unittest.TestCase): _MULTIPART_DUMP = [ '--boundary', 'Content-Disposition: form-data; name="prod"', '', 'content_shell', '--boundary', 'Content-Disposition: form-data; name="pid"', '', '4711', '--boundary', 'Content-Disposition: form-data; name="upload_file_minidump"; filename="dump"', 'Content-Type: application/octet-stream', '', 'MDMP', '--boundary--', ] def test_check_generate_breakpad_symbols_actually_exists(self): host = Host() dump_reader = DumpReaderMultipart(host, build_dir=None) self.assertTrue(host.filesystem.exists(dump_reader._path_to_generate_breakpad_symbols())) def test_check_is_functional_breakpad_tools_not_found(self): host = MockHost() build_dir = "/mock-checkout/out/Debug" host.filesystem.maybe_make_directory(build_dir) dump_reader = DumpReaderMultipart(host, build_dir) dump_reader._file_extension = lambda: 'dmp' dump_reader._binaries_to_symbolize = lambda: ['content_shell'] self.assertFalse(dump_reader.check_is_functional()) def test_get_pid_from_dump(self): host = MockHost() dump_file = '/crash-dumps/dump.dmp' expected_pid = '4711' host.filesystem.write_text_file(dump_file, "\r\n".join(TestDumpReaderMultipart._MULTIPART_DUMP)) build_dir = "/mock-checkout/out/Debug" host.filesystem.maybe_make_directory(build_dir) host.filesystem.exists = lambda x: True # The mock file object returned by open_binary_file_for_reading doesn't # have readline(), however, the real File object does. 
host.filesystem.open_binary_file_for_reading = host.filesystem.open_text_file_for_reading dump_reader = DumpReaderMultipart(host, build_dir) dump_reader._file_extension = lambda: 'dmp' dump_reader._binaries_to_symbolize = lambda: ['content_shell'] self.assertTrue(dump_reader.check_is_functional()) self.assertEqual(expected_pid, dump_reader._get_pid_from_dump(dump_file)) def test_get_stack_from_dump(self): host = MockHost() dump_file = '/crash-dumps/dump.dmp' host.filesystem.write_text_file(dump_file, "\r\n".join(TestDumpReaderMultipart._MULTIPART_DUMP)) build_dir = "/mock-checkout/out/Debug" host.filesystem.maybe_make_directory(build_dir) host.filesystem.exists = lambda x: True # The mock file object returned by open_binary_file_for_reading doesn't # have readline(), however, the real File object does. host.filesystem.open_binary_file_for_reading = host.filesystem.open_text_file_for_reading dump_reader = DumpReaderMultipart(host, build_dir) dump_reader._file_extension = lambda: 'dmp' dump_reader._binaries_to_symbolize = lambda: ['content_shell'] self.assertTrue(dump_reader.check_is_functional()) self.assertEqual("MOCK output of child process", dump_reader._get_stack_from_dump(dump_file)) self.assertEqual(2, len(host.executive.calls)) cmd_line = " ".join(host.executive.calls[0]) self.assertIn('generate_breakpad_symbols.py', cmd_line) cmd_line = " ".join(host.executive.calls[1]) self.assertIn('minidump_stackwalk', cmd_line)
# Copyright (C) 2013 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import unittest from blinkpy.common.host import Host from blinkpy.common.host_mock import MockHost from blinkpy.web_tests.breakpad.dump_reader_multipart import DumpReaderMultipart class TestDumpReaderMultipart(unittest.TestCase): _MULTIPART_DUMP = [ '--boundary', 'Content-Disposition: form-data; name="prod"', '', 'content_shell', '--boundary', 'Content-Disposition: form-data; name="pid"', '', '4711', '--boundary', 'Content-Disposition: form-data; name="upload_file_minidump"; filename="dump"', 'Content-Type: application/octet-stream', '', 'MDMP', '--boundary--', ] def test_check_generate_breakpad_symbols_actually_exists(self): host = Host() dump_reader = DumpReaderMultipart(host, build_dir=None) self.assertTrue(host.filesystem.exists(dump_reader._path_to_generate_breakpad_symbols())) def test_check_is_functional_breakpad_tools_not_found(self): host = MockHost() build_dir = "/mock-checkout/out/Debug" host.filesystem.maybe_make_directory(build_dir) dump_reader = DumpReaderMultipart(host, build_dir) dump_reader._file_extension = lambda: 'dmp' dump_reader._binaries_to_symbolize = lambda: ['content_shell'] self.assertFalse(dump_reader.check_is_functional()) def test_get_pid_from_dump(self): host = MockHost() dump_file = '/crash-dumps/dump.dmp' expected_pid = '4711' host.filesystem.write_text_file(dump_file, "\r\n".join(TestDumpReaderMultipart._MULTIPART_DUMP)) build_dir = "/mock-checkout/out/Debug" host.filesystem.maybe_make_directory(build_dir) host.filesystem.exists = lambda x: True # The mock file object returned by open_binary_file_for_reading doesn't # have readline(), however, the real File object does. 
host.filesystem.open_binary_file_for_reading = host.filesystem.open_text_file_for_reading dump_reader = DumpReaderMultipart(host, build_dir) dump_reader._file_extension = lambda: 'dmp' dump_reader._binaries_to_symbolize = lambda: ['content_shell'] self.assertTrue(dump_reader.check_is_functional()) self.assertEqual(expected_pid, dump_reader._get_pid_from_dump(dump_file)) def test_get_stack_from_dump(self): host = MockHost() dump_file = '/crash-dumps/dump.dmp' host.filesystem.write_text_file(dump_file, "\r\n".join(TestDumpReaderMultipart._MULTIPART_DUMP)) build_dir = "/mock-checkout/out/Debug" host.filesystem.maybe_make_directory(build_dir) host.filesystem.exists = lambda x: True # The mock file object returned by open_binary_file_for_reading doesn't # have readline(), however, the real File object does. host.filesystem.open_binary_file_for_reading = host.filesystem.open_text_file_for_reading dump_reader = DumpReaderMultipart(host, build_dir) dump_reader._file_extension = lambda: 'dmp' dump_reader._binaries_to_symbolize = lambda: ['content_shell'] self.assertTrue(dump_reader.check_is_functional()) self.assertEqual("MOCK output of child process", dump_reader._get_stack_from_dump(dump_file)) self.assertEqual(2, len(host.executive.calls)) cmd_line = " ".join(host.executive.calls[0]) self.assertIn('generate_breakpad_symbols.py', cmd_line) cmd_line = " ".join(host.executive.calls[1]) self.assertIn('minidump_stackwalk', cmd_line)
en
0.789713
# Copyright (C) 2013 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # The mock file object returned by open_binary_file_for_reading doesn't # have readline(), however, the real File object does. # The mock file object returned by open_binary_file_for_reading doesn't # have readline(), however, the real File object does.
1.293449
1
scripts/emoji-to-scl.py
SilverWingedSeraph/sws-dotfiles
3
9841
#!/usr/bin/python3 # -*- coding: utf-8 -*- from subprocess import Popen, PIPE emojis="""⛑🏻 Helmet With White Cross, Type-1-2 ⛑🏼 Helmet With White Cross, Type-3 ⛑🏽 Helmet With White Cross, Type-4 ⛑🏾 Helmet With White Cross, Type-5 ⛑🏿 Helmet With White Cross, Type-6 💏🏻 Kiss, Type-1-2 💏🏼 Kiss, Type-3 💏🏽 Kiss, Type-4 💏🏾 Kiss, Type-5 💏🏿 Kiss, Type-6 💑🏻 Couple With Heart, Type-1-2 💑🏼 Couple With Heart, Type-3 💑🏽 Couple With Heart, Type-4 💑🏾 Couple With Heart, Type-5 💑🏿 Couple With Heart, Type-6 ⛷🏻 Skier, Type-1-2 ⛷🏼 Skier, Type-3 ⛷🏽 Skier, Type-4 ⛷🏾 Skier, Type-5 ⛷🏿 Skier, Type-6 😀 Grinning Face 😁 Grinning Face With Smiling Eyes 😂 Face With Tears of Joy 🤣 Rolling on the Floor Laughing 😃 Smiling Face With Open Mouth 😄 Smiling Face With Open Mouth & Smiling Eyes 😅 Smiling Face With Open Mouth & Cold Sweat 😆 Smiling Face With Open Mouth & Closed Eyes 😉 Winking Face 😊 Smiling Face With Smiling Eyes 😋 Face Savouring Delicious Food 😎 Smiling Face With Sunglasses 😍 Smiling Face With Heart-Eyes 😘 Face Blowing a Kiss 😗 Kissing Face 😙 Kissing Face With Smiling Eyes 😚 Kissing Face With Closed Eyes ☺ Smiling Face 🙂 Slightly Smiling Face 🤗 Hugging Face 🤩 Star-Struck 🤔 Thinking Face 🤨 Face With Raised Eyebrow 😐 Neutral Face 😑 Expressionless Face 😶 Face Without Mouth 🙄 Face With Rolling Eyes 😏 Smirking Face 😣 Persevering Face 😥 Disappointed but Relieved Face 😮 Face With Open Mouth 🤐 Zipper-Mouth Face 😯 Hushed Face 😪 Sleepy Face 😫 Tired Face 😴 Sleeping Face 😌 Relieved Face 😛 Face With Stuck-Out Tongue 😜 Face With Stuck-Out Tongue & Winking Eye 😝 Face With Stuck-Out Tongue & Closed Eyes 🤤 Drooling Face 😒 Unamused Face 😓 Face With Cold Sweat 😔 Pensive Face 😕 Confused Face 🙃 Upside-Down Face 🤑 Money-Mouth Face 😲 Astonished Face ☹ Frowning Face 🙁 Slightly Frowning Face 😖 Confounded Face 😞 Disappointed Face 😟 Worried Face 😤 Face With Steam From Nose 😢 Crying Face 😭 Loudly Crying Face 😦 Frowning Face With Open Mouth 😧 Anguished Face 😨 Fearful Face 😩 Weary Face 🤯 Exploding Head 😬 Grimacing 
Face 😰 Face With Open Mouth & Cold Sweat 😱 Face Screaming in Fear 😳 Flushed Face 🤪 Crazy Face 😵 Dizzy Face 😡 Pouting Face 😠 Angry Face 🤬 Face With Symbols Over Mouth 😷 Face With Medical Mask 🤒 Face With Thermometer 🤕 Face With Head-Bandage 🤢 Nauseated Face 🤮 Face Vomiting 🤧 Sneezing Face 😇 Smiling Face With Halo 🤠 Cowboy Hat Face 🤡 Clown Face 🤥 Lying Face 🤫 Shushing Face 🤭 Face With Hand Over Mouth 🧐 Face With Monocle 🤓 Nerd Face 😈 Smiling Face With Horns 👿 Angry Face With Horns 👹 Ogre 👺 Goblin 💀 Skull ☠ Skull and Crossbones 👻 Ghost 👽 Alien 👾 Alien Monster 🤖 Robot Face 💩 Pile of Poo 😺 Smiling Cat Face With Open Mouth 😸 Grinning Cat Face With Smiling Eyes 😹 Cat Face With Tears of Joy 😻 Smiling Cat Face With Heart-Eyes 😼 Cat Face With Wry Smile 😽 Kissing Cat Face With Closed Eyes 🙀 Weary Cat Face 😿 Crying Cat Face 😾 Pouting Cat Face 🙈 See-No-Evil Monkey 🙉 Hear-No-Evil Monkey 🙊 Speak-No-Evil Monkey 👶 Baby 👶🏻 Baby: Light Skin Tone 👶🏼 Baby: Medium-Light Skin Tone 👶🏽 Baby: Medium Skin Tone 👶🏾 Baby: Medium-Dark Skin Tone 👶🏿 Baby: Dark Skin Tone 🧒 Child 🧒🏻 Child: Light Skin Tone 🧒🏼 Child: Medium-Light Skin Tone 🧒🏽 Child: Medium Skin Tone 🧒🏾 Child: Medium-Dark Skin Tone 🧒🏿 Child: Dark Skin Tone 👦 Boy 👦🏻 Boy: Light Skin Tone 👦🏼 Boy: Medium-Light Skin Tone 👦🏽 Boy: Medium Skin Tone 👦🏾 Boy: Medium-Dark Skin Tone 👦🏿 Boy: Dark Skin Tone 👧 Girl 👧🏻 Girl: Light Skin Tone 👧🏼 Girl: Medium-Light Skin Tone 👧🏽 Girl: Medium Skin Tone 👧🏾 Girl: Medium-Dark Skin Tone 👧🏿 Girl: Dark Skin Tone 🧑 Adult 🧑🏻 Adult: Light Skin Tone 🧑🏼 Adult: Medium-Light Skin Tone 🧑🏽 Adult: Medium Skin Tone 🧑🏾 Adult: Medium-Dark Skin Tone 🧑🏿 Adult: Dark Skin Tone 👨 Man 👨🏻 Man: Light Skin Tone 👨🏼 Man: Medium-Light Skin Tone 👨🏽 Man: Medium Skin Tone 👨🏾 Man: Medium-Dark Skin Tone 👨🏿 Man: Dark Skin Tone 👩 Woman 👩🏻 Woman: Light Skin Tone 👩🏼 Woman: Medium-Light Skin Tone 👩🏽 Woman: Medium Skin Tone 👩🏾 Woman: Medium-Dark Skin Tone 👩🏿 Woman: Dark Skin Tone 🧓 Older Adult 🧓🏻 Older Adult: Light Skin Tone 🧓🏼 Older Adult: 
Medium-Light Skin Tone 🧓🏽 Older Adult: Medium Skin Tone 🧓🏾 Older Adult: Medium-Dark Skin Tone 🧓🏿 Older Adult: Dark Skin Tone 👴 Old Man 👴🏻 Old Man: Light Skin Tone 👴🏼 Old Man: Medium-Light Skin Tone 👴🏽 Old Man: Medium Skin Tone 👴🏾 Old Man: Medium-Dark Skin Tone 👴🏿 Old Man: Dark Skin Tone 👵 Old Woman 👵🏻 Old Woman: Light Skin Tone 👵🏼 Old Woman: Medium-Light Skin Tone 👵🏽 Old Woman: Medium Skin Tone 👵🏾 Old Woman: Medium-Dark Skin Tone 👵🏿 Old Woman: Dark Skin Tone 👨‍⚕️ Man Health Worker 👨🏻‍⚕️ Man Health Worker: Light Skin Tone 👨🏼‍⚕️ Man Health Worker: Medium-Light Skin Tone 👨🏽‍⚕️ Man Health Worker: Medium Skin Tone 👨🏾‍⚕️ Man Health Worker: Medium-Dark Skin Tone 👨🏿‍⚕️ Man Health Worker: Dark Skin Tone 👩‍⚕️ Woman Health Worker 👩🏻‍⚕️ Woman Health Worker: Light Skin Tone 👩🏼‍⚕️ Woman Health Worker: Medium-Light Skin Tone 👩🏽‍⚕️ Woman Health Worker: Medium Skin Tone 👩🏾‍⚕️ Woman Health Worker: Medium-Dark Skin Tone 👩🏿‍⚕️ Woman Health Worker: Dark Skin Tone 👨‍🎓 Man Student 👨🏻‍🎓 Man Student: Light Skin Tone 👨🏼‍🎓 Man Student: Medium-Light Skin Tone 👨🏽‍🎓 Man Student: Medium Skin Tone 👨🏾‍🎓 Man Student: Medium-Dark Skin Tone 👨🏿‍🎓 Man Student: Dark Skin Tone 👩‍🎓 Woman Student 👩🏻‍🎓 Woman Student: Light Skin Tone 👩🏼‍🎓 Woman Student: Medium-Light Skin Tone 👩🏽‍🎓 Woman Student: Medium Skin Tone 👩🏾‍🎓 Woman Student: Medium-Dark Skin Tone 👩🏿‍🎓 Woman Student: Dark Skin Tone 👨‍🏫 Man Teacher 👨🏻‍🏫 Man Teacher: Light Skin Tone 👨🏼‍🏫 Man Teacher: Medium-Light Skin Tone 👨🏽‍🏫 Man Teacher: Medium Skin Tone 👨🏾‍🏫 Man Teacher: Medium-Dark Skin Tone 👨🏿‍🏫 Man Teacher: Dark Skin Tone 👩‍🏫 Woman Teacher 👩🏻‍🏫 Woman Teacher: Light Skin Tone 👩🏼‍🏫 Woman Teacher: Medium-Light Skin Tone 👩🏽‍🏫 Woman Teacher: Medium Skin Tone 👩🏾‍🏫 Woman Teacher: Medium-Dark Skin Tone 👩🏿‍🏫 Woman Teacher: Dark Skin Tone 👨‍⚖️ Man Judge 👨🏻‍⚖️ Man Judge: Light Skin Tone 👨🏼‍⚖️ Man Judge: Medium-Light Skin Tone 👨🏽‍⚖️ Man Judge: Medium Skin Tone 👨🏾‍⚖️ Man Judge: Medium-Dark Skin Tone 👨🏿‍⚖️ Man Judge: Dark Skin Tone 👩‍⚖️ Woman Judge 👩🏻‍⚖️ Woman 
Judge: Light Skin Tone 👩🏼‍⚖️ Woman Judge: Medium-Light Skin Tone 👩🏽‍⚖️ Woman Judge: Medium Skin Tone 👩🏾‍⚖️ Woman Judge: Medium-Dark Skin Tone 👩🏿‍⚖️ Woman Judge: Dark Skin Tone 👨‍🌾 Man Farmer 👨🏻‍🌾 Man Farmer: Light Skin Tone 👨🏼‍🌾 Man Farmer: Medium-Light Skin Tone 👨🏽‍🌾 Man Farmer: Medium Skin Tone 👨🏾‍🌾 Man Farmer: Medium-Dark Skin Tone 👨🏿‍🌾 Man Farmer: Dark Skin Tone 👩‍🌾 Woman Farmer 👩🏻‍🌾 Woman Farmer: Light Skin Tone 👩🏼‍🌾 Woman Farmer: Medium-Light Skin Tone 👩🏽‍🌾 Woman Farmer: Medium Skin Tone 👩🏾‍🌾 Woman Farmer: Medium-Dark Skin Tone 👩🏿‍🌾 Woman Farmer: Dark Skin Tone 👨‍🍳 Man Cook 👨🏻‍🍳 Man Cook: Light Skin Tone 👨🏼‍🍳 Man Cook: Medium-Light Skin Tone 👨🏽‍🍳 Man Cook: Medium Skin Tone 👨🏾‍🍳 Man Cook: Medium-Dark Skin Tone 👨🏿‍🍳 Man Cook: Dark Skin Tone 👩‍🍳 Woman Cook 👩🏻‍🍳 Woman Cook: Light Skin Tone 👩🏼‍🍳 Woman Cook: Medium-Light Skin Tone 👩🏽‍🍳 Woman Cook: Medium Skin Tone 👩🏾‍🍳 Woman Cook: Medium-Dark Skin Tone 👩🏿‍🍳 Woman Cook: Dark Skin Tone 👨‍🔧 Man Mechanic 👨🏻‍🔧 Man Mechanic: Light Skin Tone 👨🏼‍🔧 Man Mechanic: Medium-Light Skin Tone 👨🏽‍🔧 Man Mechanic: Medium Skin Tone 👨🏾‍🔧 Man Mechanic: Medium-Dark Skin Tone 👨🏿‍🔧 Man Mechanic: Dark Skin Tone 👩‍🔧 Woman Mechanic 👩🏻‍🔧 Woman Mechanic: Light Skin Tone 👩🏼‍🔧 Woman Mechanic: Medium-Light Skin Tone 👩🏽‍🔧 Woman Mechanic: Medium Skin Tone 👩🏾‍🔧 Woman Mechanic: Medium-Dark Skin Tone 👩🏿‍🔧 Woman Mechanic: Dark Skin Tone 👨‍🏭 Man Factory Worker 👨🏻‍🏭 Man Factory Worker: Light Skin Tone 👨🏼‍🏭 Man Factory Worker: Medium-Light Skin Tone 👨🏽‍🏭 Man Factory Worker: Medium Skin Tone 👨🏾‍🏭 Man Factory Worker: Medium-Dark Skin Tone 👨🏿‍🏭 Man Factory Worker: Dark Skin Tone 👩‍🏭 Woman Factory Worker 👩🏻‍🏭 Woman Factory Worker: Light Skin Tone 👩🏼‍🏭 Woman Factory Worker: Medium-Light Skin Tone 👩🏽‍🏭 Woman Factory Worker: Medium Skin Tone 👩🏾‍🏭 Woman Factory Worker: Medium-Dark Skin Tone 👩🏿‍🏭 Woman Factory Worker: Dark Skin Tone 👨‍💼 Man Office Worker 👨🏻‍💼 Man Office Worker: Light Skin Tone 👨🏼‍💼 Man Office Worker: Medium-Light Skin Tone 👨🏽‍💼 Man Office Worker: 
Medium Skin Tone 👨🏾‍💼 Man Office Worker: Medium-Dark Skin Tone 👨🏿‍💼 Man Office Worker: Dark Skin Tone 👩‍💼 Woman Office Worker 👩🏻‍💼 Woman Office Worker: Light Skin Tone 👩🏼‍💼 Woman Office Worker: Medium-Light Skin Tone 👩🏽‍💼 Woman Office Worker: Medium Skin Tone 👩🏾‍💼 Woman Office Worker: Medium-Dark Skin Tone 👩🏿‍💼 Woman Office Worker: Dark Skin Tone 👨‍🔬 Man Scientist 👨🏻‍🔬 Man Scientist: Light Skin Tone 👨🏼‍🔬 Man Scientist: Medium-Light Skin Tone 👨🏽‍🔬 Man Scientist: Medium Skin Tone 👨🏾‍🔬 Man Scientist: Medium-Dark Skin Tone 👨🏿‍🔬 Man Scientist: Dark Skin Tone 👩‍🔬 Woman Scientist 👩🏻‍🔬 Woman Scientist: Light Skin Tone 👩🏼‍🔬 Woman Scientist: Medium-Light Skin Tone 👩🏽‍🔬 Woman Scientist: Medium Skin Tone 👩🏾‍🔬 Woman Scientist: Medium-Dark Skin Tone 👩🏿‍🔬 Woman Scientist: Dark Skin Tone 👨‍💻 Man Technologist 👨🏻‍💻 Man Technologist: Light Skin Tone 👨🏼‍💻 Man Technologist: Medium-Light Skin Tone 👨🏽‍💻 Man Technologist: Medium Skin Tone 👨🏾‍💻 Man Technologist: Medium-Dark Skin Tone 👨🏿‍💻 Man Technologist: Dark Skin Tone 👩‍💻 Woman Technologist 👩🏻‍💻 Woman Technologist: Light Skin Tone 👩🏼‍💻 Woman Technologist: Medium-Light Skin Tone 👩🏽‍💻 Woman Technologist: Medium Skin Tone 👩🏾‍💻 Woman Technologist: Medium-Dark Skin Tone 👩🏿‍💻 Woman Technologist: Dark Skin Tone 👨‍🎤 Man Singer 👨🏻‍🎤 Man Singer: Light Skin Tone 👨🏼‍🎤 Man Singer: Medium-Light Skin Tone 👨🏽‍🎤 Man Singer: Medium Skin Tone 👨🏾‍🎤 Man Singer: Medium-Dark Skin Tone 👨🏿‍🎤 Man Singer: Dark Skin Tone 👩‍🎤 Woman Singer 👩🏻‍🎤 Woman Singer: Light Skin Tone 👩🏼‍🎤 Woman Singer: Medium-Light Skin Tone 👩🏽‍🎤 Woman Singer: Medium Skin Tone 👩🏾‍🎤 Woman Singer: Medium-Dark Skin Tone 👩🏿‍🎤 Woman Singer: Dark Skin Tone 👨‍🎨 Man Artist 👨🏻‍🎨 Man Artist: Light Skin Tone 👨🏼‍🎨 Man Artist: Medium-Light Skin Tone 👨🏽‍🎨 Man Artist: Medium Skin Tone 👨🏾‍🎨 Man Artist: Medium-Dark Skin Tone 👨🏿‍🎨 Man Artist: Dark Skin Tone 👩‍🎨 Woman Artist 👩🏻‍🎨 Woman Artist: Light Skin Tone 👩🏼‍🎨 Woman Artist: Medium-Light Skin Tone 👩🏽‍🎨 Woman Artist: Medium Skin Tone 👩🏾‍🎨 Woman Artist: 
Medium-Dark Skin Tone 👩🏿‍🎨 Woman Artist: Dark Skin Tone 👨‍✈️ Man Pilot 👨🏻‍✈️ Man Pilot: Light Skin Tone 👨🏼‍✈️ Man Pilot: Medium-Light Skin Tone 👨🏽‍✈️ Man Pilot: Medium Skin Tone 👨🏾‍✈️ Man Pilot: Medium-Dark Skin Tone 👨🏿‍✈️ Man Pilot: Dark Skin Tone 👩‍✈️ Woman Pilot 👩🏻‍✈️ Woman Pilot: Light Skin Tone 👩🏼‍✈️ Woman Pilot: Medium-Light Skin Tone 👩🏽‍✈️ Woman Pilot: Medium Skin Tone 👩🏾‍✈️ Woman Pilot: Medium-Dark Skin Tone 👩🏿‍✈️ Woman Pilot: Dark Skin Tone 👨‍🚀 Man Astronaut 👨🏻‍🚀 Man Astronaut: Light Skin Tone 👨🏼‍🚀 Man Astronaut: Medium-Light Skin Tone 👨🏽‍🚀 Man Astronaut: Medium Skin Tone 👨🏾‍🚀 Man Astronaut: Medium-Dark Skin Tone 👨🏿‍🚀 Man Astronaut: Dark Skin Tone 👩‍🚀 Woman Astronaut 👩🏻‍🚀 Woman Astronaut: Light Skin Tone 👩🏼‍🚀 Woman Astronaut: Medium-Light Skin Tone 👩🏽‍🚀 Woman Astronaut: Medium Skin Tone 👩🏾‍🚀 Woman Astronaut: Medium-Dark Skin Tone 👩🏿‍🚀 Woman Astronaut: Dark Skin Tone 👨‍🚒 Man Firefighter 👨🏻‍🚒 Man Firefighter: Light Skin Tone 👨🏼‍🚒 Man Firefighter: Medium-Light Skin Tone 👨🏽‍🚒 Man Firefighter: Medium Skin Tone 👨🏾‍🚒 Man Firefighter: Medium-Dark Skin Tone 👨🏿‍🚒 Man Firefighter: Dark Skin Tone 👩‍🚒 Woman Firefighter 👩🏻‍🚒 Woman Firefighter: Light Skin Tone 👩🏼‍🚒 Woman Firefighter: Medium-Light Skin Tone 👩🏽‍🚒 Woman Firefighter: Medium Skin Tone 👩🏾‍🚒 Woman Firefighter: Medium-Dark Skin Tone 👩🏿‍🚒 Woman Firefighter: Dark Skin Tone 👮 Police Officer 👮🏻 Police Officer: Light Skin Tone 👮🏼 Police Officer: Medium-Light Skin Tone 👮🏽 Police Officer: Medium Skin Tone 👮🏾 Police Officer: Medium-Dark Skin Tone 👮🏿 Police Officer: Dark Skin Tone 👮‍♂️ Man Police Officer 👮🏻‍♂️ Man Police Officer: Light Skin Tone 👮🏼‍♂️ Man Police Officer: Medium-Light Skin Tone 👮🏽‍♂️ Man Police Officer: Medium Skin Tone 👮🏾‍♂️ Man Police Officer: Medium-Dark Skin Tone 👮🏿‍♂️ Man Police Officer: Dark Skin Tone 👮‍♀️ Woman Police Officer 👮🏻‍♀️ Woman Police Officer: Light Skin Tone 👮🏼‍♀️ Woman Police Officer: Medium-Light Skin Tone 👮🏽‍♀️ Woman Police Officer: Medium Skin Tone 👮🏾‍♀️ Woman Police Officer: 
Medium-Dark Skin Tone 👮🏿‍♀️ Woman Police Officer: Dark Skin Tone 🕵 Detective 🕵🏻 Detective: Light Skin Tone 🕵🏼 Detective: Medium-Light Skin Tone 🕵🏽 Detective: Medium Skin Tone 🕵🏾 Detective: Medium-Dark Skin Tone 🕵🏿 Detective: Dark Skin Tone 🕵️‍♂️ Man Detective 🕵🏻‍♂️ Man Detective: Light Skin Tone 🕵🏼‍♂️ Man Detective: Medium-Light Skin Tone 🕵🏽‍♂️ Man Detective: Medium Skin Tone 🕵🏾‍♂️ Man Detective: Medium-Dark Skin Tone 🕵🏿‍♂️ Man Detective: Dark Skin Tone 🕵️‍♀️ Woman Detective 🕵🏻‍♀️ Woman Detective: Light Skin Tone 🕵🏼‍♀️ Woman Detective: Medium-Light Skin Tone 🕵🏽‍♀️ Woman Detective: Medium Skin Tone 🕵🏾‍♀️ Woman Detective: Medium-Dark Skin Tone 🕵🏿‍♀️ Woman Detective: Dark Skin Tone 💂 Guard 💂🏻 Guard: Light Skin Tone 💂🏼 Guard: Medium-Light Skin Tone 💂🏽 Guard: Medium Skin Tone 💂🏾 Guard: Medium-Dark Skin Tone 💂🏿 Guard: Dark Skin Tone 💂‍♂️ Man Guard 💂🏻‍♂️ Man Guard: Light Skin Tone 💂🏼‍♂️ Man Guard: Medium-Light Skin Tone 💂🏽‍♂️ Man Guard: Medium Skin Tone 💂🏾‍♂️ Man Guard: Medium-Dark Skin Tone 💂🏿‍♂️ Man Guard: Dark Skin Tone 💂‍♀️ Woman Guard 💂🏻‍♀️ Woman Guard: Light Skin Tone 💂🏼‍♀️ Woman Guard: Medium-Light Skin Tone 💂🏽‍♀️ Woman Guard: Medium Skin Tone 💂🏾‍♀️ Woman Guard: Medium-Dark Skin Tone 💂🏿‍♀️ Woman Guard: Dark Skin Tone 👷 Construction Worker 👷🏻 Construction Worker: Light Skin Tone 👷🏼 Construction Worker: Medium-Light Skin Tone 👷🏽 Construction Worker: Medium Skin Tone 👷🏾 Construction Worker: Medium-Dark Skin Tone 👷🏿 Construction Worker: Dark Skin Tone 👷‍♂️ Man Construction Worker 👷🏻‍♂️ Man Construction Worker: Light Skin Tone 👷🏼‍♂️ Man Construction Worker: Medium-Light Skin Tone 👷🏽‍♂️ Man Construction Worker: Medium Skin Tone 👷🏾‍♂️ Man Construction Worker: Medium-Dark Skin Tone 👷🏿‍♂️ Man Construction Worker: Dark Skin Tone 👷‍♀️ Woman Construction Worker 👷🏻‍♀️ Woman Construction Worker: Light Skin Tone 👷🏼‍♀️ Woman Construction Worker: Medium-Light Skin Tone 👷🏽‍♀️ Woman Construction Worker: Medium Skin Tone 👷🏾‍♀️ Woman Construction Worker: Medium-Dark Skin Tone 👷🏿‍♀️ 
Woman Construction Worker: Dark Skin Tone 🤴 Prince 🤴🏻 Prince: Light Skin Tone 🤴🏼 Prince: Medium-Light Skin Tone 🤴🏽 Prince: Medium Skin Tone 🤴🏾 Prince: Medium-Dark Skin Tone 🤴🏿 Prince: Dark Skin Tone 👸 Princess 👸🏻 Princess: Light Skin Tone 👸🏼 Princess: Medium-Light Skin Tone 👸🏽 Princess: Medium Skin Tone 👸🏾 Princess: Medium-Dark Skin Tone 👸🏿 Princess: Dark Skin Tone 👳 Person Wearing Turban 👳🏻 Person Wearing Turban: Light Skin Tone 👳🏼 Person Wearing Turban: Medium-Light Skin Tone 👳🏽 Person Wearing Turban: Medium Skin Tone 👳🏾 Person Wearing Turban: Medium-Dark Skin Tone 👳🏿 Person Wearing Turban: Dark Skin Tone 👳‍♂️ Man Wearing Turban 👳🏻‍♂️ Man Wearing Turban: Light Skin Tone 👳🏼‍♂️ Man Wearing Turban: Medium-Light Skin Tone 👳🏽‍♂️ Man Wearing Turban: Medium Skin Tone 👳🏾‍♂️ Man Wearing Turban: Medium-Dark Skin Tone 👳🏿‍♂️ Man Wearing Turban: Dark Skin Tone 👳‍♀️ Woman Wearing Turban 👳🏻‍♀️ Woman Wearing Turban: Light Skin Tone 👳🏼‍♀️ Woman Wearing Turban: Medium-Light Skin Tone 👳🏽‍♀️ Woman Wearing Turban: Medium Skin Tone 👳🏾‍♀️ Woman Wearing Turban: Medium-Dark Skin Tone 👳🏿‍♀️ Woman Wearing Turban: Dark Skin Tone 👲 Man With Chinese Cap 👲🏻 Man With Chinese Cap: Light Skin Tone 👲🏼 Man With Chinese Cap: Medium-Light Skin Tone 👲🏽 Man With Chinese Cap: Medium Skin Tone 👲🏾 Man With Chinese Cap: Medium-Dark Skin Tone 👲🏿 Man With Chinese Cap: Dark Skin Tone 🧕 Woman With Headscarf 🧕🏻 Person With Headscarf: Light Skin Tone 🧕🏼 Person With Headscarf: Medium-Light Skin Tone 🧕🏽 Person With Headscarf: Medium Skin Tone 🧕🏾 Person With Headscarf: Medium-Dark Skin Tone 🧕🏿 Person With Headscarf: Dark Skin Tone 🧔 Bearded Person 🧔🏻 Bearded Person: Light Skin Tone 🧔🏼 Bearded Person: Medium-Light Skin Tone 🧔🏽 Bearded Person: Medium Skin Tone 🧔🏾 Bearded Person: Medium-Dark Skin Tone 🧔🏿 Bearded Person: Dark Skin Tone 👱 Blond-Haired Person 👱🏻 Blond-Haired Person: Light Skin Tone 👱🏼 Blond-Haired Person: Medium-Light Skin Tone 👱🏽 Blond-Haired Person: Medium Skin Tone 👱🏾 Blond-Haired Person: Medium-Dark 
Skin Tone 👱🏿 Blond-Haired Person: Dark Skin Tone 👱‍♂️ Blond-Haired Man 👱🏻‍♂️ Blond-Haired Man: Light Skin Tone 👱🏼‍♂️ Blond-Haired Man: Medium-Light Skin Tone 👱🏽‍♂️ Blond-Haired Man: Medium Skin Tone 👱🏾‍♂️ Blond-Haired Man: Medium-Dark Skin Tone 👱🏿‍♂️ Blond-Haired Man: Dark Skin Tone 👱‍♀️ Blond-Haired Woman 👱🏻‍♀️ Blond-Haired Woman: Light Skin Tone 👱🏼‍♀️ Blond-Haired Woman: Medium-Light Skin Tone 👱🏽‍♀️ Blond-Haired Woman: Medium Skin Tone 👱🏾‍♀️ Blond-Haired Woman: Medium-Dark Skin Tone 👱🏿‍♀️ Blond-Haired Woman: Dark Skin Tone 🤵 Man in Tuxedo 🤵🏻 Man in Tuxedo: Light Skin Tone 🤵🏼 Man in Tuxedo: Medium-Light Skin Tone 🤵🏽 Man in Tuxedo: Medium Skin Tone 🤵🏾 Man in Tuxedo: Medium-Dark Skin Tone 🤵🏿 Man in Tuxedo: Dark Skin Tone 👰 Bride With Veil 👰🏻 Bride With Veil: Light Skin Tone 👰🏼 Bride With Veil: Medium-Light Skin Tone 👰🏽 Bride With Veil: Medium Skin Tone 👰🏾 Bride With Veil: Medium-Dark Skin Tone 👰🏿 Bride With Veil: Dark Skin Tone 🤰 Pregnant Woman 🤰🏻 Pregnant Woman: Light Skin Tone 🤰🏼 Pregnant Woman: Medium-Light Skin Tone 🤰🏽 Pregnant Woman: Medium Skin Tone 🤰🏾 Pregnant Woman: Medium-Dark Skin Tone 🤰🏿 Pregnant Woman: Dark Skin Tone 🤱 Breast-Feeding 🤱🏻 Breast-Feeding: Light Skin Tone 🤱🏼 Breast-Feeding: Medium-Light Skin Tone 🤱🏽 Breast-Feeding: Medium Skin Tone 🤱🏾 Breast-Feeding: Medium-Dark Skin Tone 🤱🏿 Breast-Feeding: Dark Skin Tone 👼 Baby Angel 👼🏻 Baby Angel: Light Skin Tone 👼🏼 Baby Angel: Medium-Light Skin Tone 👼🏽 Baby Angel: Medium Skin Tone 👼🏾 Baby Angel: Medium-Dark Skin Tone 👼🏿 Baby Angel: Dark Skin Tone 🎅 Santa Claus 🎅🏻 Santa Claus: Light Skin Tone 🎅🏼 Santa Claus: Medium-Light Skin Tone 🎅🏽 Santa Claus: Medium Skin Tone 🎅🏾 Santa Claus: Medium-Dark Skin Tone 🎅🏿 Santa Claus: Dark Skin Tone 🤶 Mrs. Claus 🤶🏻 Mrs. Claus: Light Skin Tone 🤶🏼 Mrs. Claus: Medium-Light Skin Tone 🤶🏽 Mrs. Claus: Medium Skin Tone 🤶🏾 Mrs. Claus: Medium-Dark Skin Tone 🤶🏿 Mrs. 
Claus: Dark Skin Tone 🧙 Mage 🧙🏻 Mage: Light Skin Tone 🧙🏼 Mage: Medium-Light Skin Tone 🧙🏽 Mage: Medium Skin Tone 🧙🏾 Mage: Medium-Dark Skin Tone 🧙🏿 Mage: Dark Skin Tone 🧙‍♀️ Woman Mage 🧙🏻‍♀️ Woman Mage: Light Skin Tone 🧙🏼‍♀️ Woman Mage: Medium-Light Skin Tone 🧙🏽‍♀️ Woman Mage: Medium Skin Tone 🧙🏾‍♀️ Woman Mage: Medium-Dark Skin Tone 🧙🏿‍♀️ Woman Mage: Dark Skin Tone 🧙‍♂️ Man Mage 🧙🏻‍♂️ Man Mage: Light Skin Tone 🧙🏼‍♂️ Man Mage: Medium-Light Skin Tone 🧙🏽‍♂️ Man Mage: Medium Skin Tone 🧙🏾‍♂️ Man Mage: Medium-Dark Skin Tone 🧙🏿‍♂️ Man Mage: Dark Skin Tone 🧚 Fairy 🧚🏻 Fairy: Light Skin Tone 🧚🏼 Fairy: Medium-Light Skin Tone 🧚🏽 Fairy: Medium Skin Tone 🧚🏾 Fairy: Medium-Dark Skin Tone 🧚🏿 Fairy: Dark Skin Tone 🧚‍♀️ Woman Fairy 🧚🏻‍♀️ Woman Fairy: Light Skin Tone 🧚🏼‍♀️ Woman Fairy: Medium-Light Skin Tone 🧚🏽‍♀️ Woman Fairy: Medium Skin Tone 🧚🏾‍♀️ Woman Fairy: Medium-Dark Skin Tone 🧚🏿‍♀️ Woman Fairy: Dark Skin Tone 🧚‍♂️ Man Fairy 🧚🏻‍♂️ Man Fairy: Light Skin Tone 🧚🏼‍♂️ Man Fairy: Medium-Light Skin Tone 🧚🏽‍♂️ Man Fairy: Medium Skin Tone 🧚🏾‍♂️ Man Fairy: Medium-Dark Skin Tone 🧚🏿‍♂️ Man Fairy: Dark Skin Tone 🧛 Vampire 🧛🏻 Vampire: Light Skin Tone 🧛🏼 Vampire: Medium-Light Skin Tone 🧛🏽 Vampire: Medium Skin Tone 🧛🏾 Vampire: Medium-Dark Skin Tone 🧛🏿 Vampire: Dark Skin Tone 🧛‍♀️ Woman Vampire 🧛🏻‍♀️ Woman Vampire: Light Skin Tone 🧛🏼‍♀️ Woman Vampire: Medium-Light Skin Tone 🧛🏽‍♀️ Woman Vampire: Medium Skin Tone 🧛🏾‍♀️ Woman Vampire: Medium-Dark Skin Tone 🧛🏿‍♀️ Woman Vampire: Dark Skin Tone 🧛‍♂️ Man Vampire 🧛🏻‍♂️ Man Vampire: Light Skin Tone 🧛🏼‍♂️ Man Vampire: Medium-Light Skin Tone 🧛🏽‍♂️ Man Vampire: Medium Skin Tone 🧛🏾‍♂️ Man Vampire: Medium-Dark Skin Tone 👯🏻 Woman With Bunny Ears, Type-1-2 👯🏼 Woman With Bunny Ears, Type-3 🧛🏿‍♂️ Man Vampire: Dark Skin Tone 👯🏽 Woman With Bunny Ears, Type-4 👯🏾 Woman With Bunny Ears, Type-5 🧜 Merperson 👯🏿 Woman With Bunny Ears, Type-6 🧜🏻 Merperson: Light Skin Tone 👯🏻‍♂️ Men With Bunny Ears Partying, Type-1-2 🧜🏼 Merperson: Medium-Light Skin Tone 👯🏼‍♂️ Men With Bunny 
Ears Partying, Type-3 🧜🏽 Merperson: Medium Skin Tone 👯🏽‍♂️ Men With Bunny Ears Partying, Type-4 🧜🏾 Merperson: Medium-Dark Skin Tone 👯🏾‍♂️ Men With Bunny Ears Partying, Type-5 🧜🏿 Merperson: Dark Skin Tone 👯🏿‍♂️ Men With Bunny Ears Partying, Type-6 🧜‍♀️ Mermaid 👯🏻‍♀️ Women With Bunny Ears Partying, Type-1-2 🧜🏻‍♀️ Mermaid: Light Skin Tone 👯🏼‍♀️ Women With Bunny Ears Partying, Type-3 🧜🏼‍♀️ Mermaid: Medium-Light Skin Tone 👯🏽‍♀️ Women With Bunny Ears Partying, Type-4 👯🏾‍♀️ Women With Bunny Ears Partying, Type-5 🧜🏽‍♀️ Mermaid: Medium Skin Tone 👯🏿‍♀️ Women With Bunny Ears Partying, Type-6 🧜🏾‍♀️ Mermaid: Medium-Dark Skin Tone 🧜🏿‍♀️ Mermaid: Dark Skin Tone 🧜‍♂️ Merman 🧜🏻‍♂️ Merman: Light Skin Tone 🧜🏼‍♂️ Merman: Medium-Light Skin Tone 👫🏻 Man and Woman Holding Hands, Type-1-2 🧜🏽‍♂️ Merman: Medium Skin Tone 👫🏼 Man and Woman Holding Hands, Type-3 👫🏽 Man and Woman Holding Hands, Type-4 🧜🏾‍♂️ Merman: Medium-Dark Skin Tone 👫🏾 Man and Woman Holding Hands, Type-5 👫🏿 Man and Woman Holding Hands, Type-6 🧜🏿‍♂️ Merman: Dark Skin Tone 👬🏻 Two Men Holding Hands, Type-1-2 🧝 Elf 👬🏼 Two Men Holding Hands, Type-3 👬🏽 Two Men Holding Hands, Type-4 🧝🏻 Elf: Light Skin Tone 👬🏾 Two Men Holding Hands, Type-5 🧝🏼 Elf: Medium-Light Skin Tone 👬🏿 Two Men Holding Hands, Type-6 🧝🏽 Elf: Medium Skin Tone 🧝🏾 Elf: Medium-Dark Skin Tone 👭🏻 Two Women Holding Hands, Type-1-2 🧝🏿 Elf: Dark Skin Tone 🧝‍♀️ Woman Elf 👭🏼 Two Women Holding Hands, Type-3 👭🏽 Two Women Holding Hands, Type-4 🧝🏻‍♀️ Woman Elf: Light Skin Tone 👭🏾 Two Women Holding Hands, Type-5 👭🏿 Two Women Holding Hands, Type-6 🧝🏼‍♀️ Woman Elf: Medium-Light Skin Tone 🧝🏽‍♀️ Woman Elf: Medium Skin Tone 🧝🏾‍♀️ Woman Elf: Medium-Dark Skin Tone 🧝🏿‍♀️ Woman Elf: Dark Skin Tone 🧝‍♂️ Man Elf 👪🏻 Family, Type-1-2 🧝🏻‍♂️ Man Elf: Light Skin Tone 👪🏼 Family, Type-3 👪🏽 Family, Type-4 🧝🏼‍♂️ Man Elf: Medium-Light Skin Tone 👪🏾 Family, Type-5 👪🏿 Family, Type-6 🧝🏽‍♂️ Man Elf: Medium Skin Tone 🧝🏾‍♂️ Man Elf: Medium-Dark Skin Tone 🧝🏿‍♂️ Man Elf: Dark Skin Tone 🧞 Genie 🧞‍♀️ Woman 
Genie 🧞‍♂️ Man Genie 🧟 Zombie 🧟‍♀️ Woman Zombie 🧟‍♂️ Man Zombie 🙍 Person Frowning 🙍🏻 Person Frowning: Light Skin Tone 🙍🏼 Person Frowning: Medium-Light Skin Tone 🙍🏽 Person Frowning: Medium Skin Tone 🙍🏾 Person Frowning: Medium-Dark Skin Tone 🙍🏿 Person Frowning: Dark Skin Tone 🙍‍♂️ Man Frowning 🙍🏻‍♂️ Man Frowning: Light Skin Tone 🏻 Light Skin Tone 🏼 Medium-Light Skin Tone 🙍🏼‍♂️ Man Frowning: Medium-Light Skin Tone 🏽 Medium Skin Tone 🙍🏽‍♂️ Man Frowning: Medium Skin Tone 🏾 Medium-Dark Skin Tone 🏿 Dark Skin Tone 🙍🏾‍♂️ Man Frowning: Medium-Dark Skin Tone 🙍🏿‍♂️ Man Frowning: Dark Skin Tone 🙍‍♀️ Woman Frowning 🙍🏻‍♀️ Woman Frowning: Light Skin Tone 🙍🏼‍♀️ Woman Frowning: Medium-Light Skin Tone 🙍🏽‍♀️ Woman Frowning: Medium Skin Tone 🙍🏾‍♀️ Woman Frowning: Medium-Dark Skin Tone 🙍🏿‍♀️ Woman Frowning: Dark Skin Tone 🙎 Person Pouting 🙎🏻 Person Pouting: Light Skin Tone 🙎🏼 Person Pouting: Medium-Light Skin Tone 🙎🏽 Person Pouting: Medium Skin Tone 🙎🏾 Person Pouting: Medium-Dark Skin Tone 🙎🏿 Person Pouting: Dark Skin Tone 🙎‍♂️ Man Pouting 🙎🏻‍♂️ Man Pouting: Light Skin Tone 🙎🏼‍♂️ Man Pouting: Medium-Light Skin Tone 🙎🏽‍♂️ Man Pouting: Medium Skin Tone 🙎🏾‍♂️ Man Pouting: Medium-Dark Skin Tone 🙎🏿‍♂️ Man Pouting: Dark Skin Tone 🙎‍♀️ Woman Pouting 🙎🏻‍♀️ Woman Pouting: Light Skin Tone 🙎🏼‍♀️ Woman Pouting: Medium-Light Skin Tone 🙎🏽‍♀️ Woman Pouting: Medium Skin Tone 🙎🏾‍♀️ Woman Pouting: Medium-Dark Skin Tone 🙎🏿‍♀️ Woman Pouting: Dark Skin Tone 🙅 Person Gesturing No 🙅🏻 Person Gesturing No: Light Skin Tone 🙅🏼 Person Gesturing No: Medium-Light Skin Tone 🙅🏽 Person Gesturing No: Medium Skin Tone 🙅🏾 Person Gesturing No: Medium-Dark Skin Tone 🙅🏿 Person Gesturing No: Dark Skin Tone 🙅‍♂️ Man Gesturing No 🙅🏻‍♂️ Man Gesturing No: Light Skin Tone 🙅🏼‍♂️ Man Gesturing No: Medium-Light Skin Tone 🙅🏽‍♂️ Man Gesturing No: Medium Skin Tone 🙅🏾‍♂️ Man Gesturing No: Medium-Dark Skin Tone 🙅🏿‍♂️ Man Gesturing No: Dark Skin Tone 🙅‍♀️ Woman Gesturing No 🙅🏻‍♀️ Woman Gesturing No: Light Skin Tone 🙅🏼‍♀️ Woman Gesturing 
No: Medium-Light Skin Tone 🙅🏽‍♀️ Woman Gesturing No: Medium Skin Tone 🙅🏾‍♀️ Woman Gesturing No: Medium-Dark Skin Tone 🙅🏿‍♀️ Woman Gesturing No: Dark Skin Tone 🙆 Person Gesturing OK 🙆🏻 Person Gesturing OK: Light Skin Tone 🙆🏼 Person Gesturing OK: Medium-Light Skin Tone 🙆🏽 Person Gesturing OK: Medium Skin Tone 🙆🏾 Person Gesturing OK: Medium-Dark Skin Tone 🙆🏿 Person Gesturing OK: Dark Skin Tone 🙆‍♂️ Man Gesturing OK 🙆🏻‍♂️ Man Gesturing OK: Light Skin Tone 🙆🏼‍♂️ Man Gesturing OK: Medium-Light Skin Tone 🙆🏽‍♂️ Man Gesturing OK: Medium Skin Tone 🙆🏾‍♂️ Man Gesturing OK: Medium-Dark Skin Tone 🙆🏿‍♂️ Man Gesturing OK: Dark Skin Tone 🙆‍♀️ Woman Gesturing OK 🙆🏻‍♀️ Woman Gesturing OK: Light Skin Tone 🙆🏼‍♀️ Woman Gesturing OK: Medium-Light Skin Tone 🙆🏽‍♀️ Woman Gesturing OK: Medium Skin Tone 🙆🏾‍♀️ Woman Gesturing OK: Medium-Dark Skin Tone 🙆🏿‍♀️ Woman Gesturing OK: Dark Skin Tone 💁 Person Tipping Hand 💁🏻 Person Tipping Hand: Light Skin Tone 💁🏼 Person Tipping Hand: Medium-Light Skin Tone 💁🏽 Person Tipping Hand: Medium Skin Tone 💁🏾 Person Tipping Hand: Medium-Dark Skin Tone 💁🏿 Person Tipping Hand: Dark Skin Tone 💁‍♂️ Man Tipping Hand 💁🏻‍♂️ Man Tipping Hand: Light Skin Tone 💁🏼‍♂️ Man Tipping Hand: Medium-Light Skin Tone 💁🏽‍♂️ Man Tipping Hand: Medium Skin Tone 💁🏾‍♂️ Man Tipping Hand: Medium-Dark Skin Tone 💁🏿‍♂️ Man Tipping Hand: Dark Skin Tone 💁‍♀️ Woman Tipping Hand 💁🏻‍♀️ Woman Tipping Hand: Light Skin Tone 💁🏼‍♀️ Woman Tipping Hand: Medium-Light Skin Tone 💁🏽‍♀️ Woman Tipping Hand: Medium Skin Tone 💁🏾‍♀️ Woman Tipping Hand: Medium-Dark Skin Tone 💁🏿‍♀️ Woman Tipping Hand: Dark Skin Tone 🙋 Person Raising Hand 🙋🏻 Person Raising Hand: Light Skin Tone 🙋🏼 Person Raising Hand: Medium-Light Skin Tone 🙋🏽 Person Raising Hand: Medium Skin Tone 🙋🏾 Person Raising Hand: Medium-Dark Skin Tone 🙋🏿 Person Raising Hand: Dark Skin Tone 🙋‍♂️ Man Raising Hand 🙋🏻‍♂️ Man Raising Hand: Light Skin Tone 🙋🏼‍♂️ Man Raising Hand: Medium-Light Skin Tone 🙋🏽‍♂️ Man Raising Hand: Medium Skin Tone 🙋🏾‍♂️ Man Raising 
Hand: Medium-Dark Skin Tone 🙋🏿‍♂️ Man Raising Hand: Dark Skin Tone 🙋‍♀️ Woman Raising Hand 🙋🏻‍♀️ Woman Raising Hand: Light Skin Tone 🙋🏼‍♀️ Woman Raising Hand: Medium-Light Skin Tone 🙋🏽‍♀️ Woman Raising Hand: Medium Skin Tone 🙋🏾‍♀️ Woman Raising Hand: Medium-Dark Skin Tone 🙋🏿‍♀️ Woman Raising Hand: Dark Skin Tone 🙇 Person Bowing 🙇🏻 Person Bowing: Light Skin Tone 🙇🏼 Person Bowing: Medium-Light Skin Tone 🙇🏽 Person Bowing: Medium Skin Tone 🙇🏾 Person Bowing: Medium-Dark Skin Tone 🙇🏿 Person Bowing: Dark Skin Tone 🙇‍♂️ Man Bowing 🙇🏻‍♂️ Man Bowing: Light Skin Tone 🤝🏻 Handshake, Type-1-2 🙇🏼‍♂️ Man Bowing: Medium-Light Skin Tone 🤝🏼 Handshake, Type-3 🤝🏽 Handshake, Type-4 🙇🏽‍♂️ Man Bowing: Medium Skin Tone 🤝🏾 Handshake, Type-5 🤝🏿 Handshake, Type-6 🙇🏾‍♂️ Man Bowing: Medium-Dark Skin Tone 🙇🏿‍♂️ Man Bowing: Dark Skin Tone 🙇‍♀️ Woman Bowing 🙇🏻‍♀️ Woman Bowing: Light Skin Tone 🙇🏼‍♀️ Woman Bowing: Medium-Light Skin Tone 🙇🏽‍♀️ Woman Bowing: Medium Skin Tone 🙇🏾‍♀️ Woman Bowing: Medium-Dark Skin Tone 🙇🏿‍♀️ Woman Bowing: Dark Skin Tone 🤦 Person Facepalming 🤦🏻 Person Facepalming: Light Skin Tone 🤦🏼 Person Facepalming: Medium-Light Skin Tone 🤦🏽 Person Facepalming: Medium Skin Tone 🤦🏾 Person Facepalming: Medium-Dark Skin Tone 🤦🏿 Person Facepalming: Dark Skin Tone 🤦‍♂️ Man Facepalming 🤦🏻‍♂️ Man Facepalming: Light Skin Tone 🤦🏼‍♂️ Man Facepalming: Medium-Light Skin Tone 🤦🏽‍♂️ Man Facepalming: Medium Skin Tone 🤦🏾‍♂️ Man Facepalming: Medium-Dark Skin Tone 🤦🏿‍♂️ Man Facepalming: Dark Skin Tone 🤦‍♀️ Woman Facepalming 🤦🏻‍♀️ Woman Facepalming: Light Skin Tone 🤦🏼‍♀️ Woman Facepalming: Medium-Light Skin Tone 🤦🏽‍♀️ Woman Facepalming: Medium Skin Tone 🤦🏾‍♀️ Woman Facepalming: Medium-Dark Skin Tone 🤦🏿‍♀️ Woman Facepalming: Dark Skin Tone 🤷 Person Shrugging 🤷🏻 Person Shrugging: Light Skin Tone 🤷🏼 Person Shrugging: Medium-Light Skin Tone 🤷🏽 Person Shrugging: Medium Skin Tone 🤷🏾 Person Shrugging: Medium-Dark Skin Tone 🤷🏿 Person Shrugging: Dark Skin Tone 🤷‍♂️ Man Shrugging 🤷🏻‍♂️ Man Shrugging: Light Skin 
Tone 🤷🏼‍♂️ Man Shrugging: Medium-Light Skin Tone 🤷🏽‍♂️ Man Shrugging: Medium Skin Tone 🤷🏾‍♂️ Man Shrugging: Medium-Dark Skin Tone 🤷🏿‍♂️ Man Shrugging: Dark Skin Tone 🤷‍♀️ Woman Shrugging 🤷🏻‍♀️ Woman Shrugging: Light Skin Tone 🤷🏼‍♀️ Woman Shrugging: Medium-Light Skin Tone 🤷🏽‍♀️ Woman Shrugging: Medium Skin Tone 🤷🏾‍♀️ Woman Shrugging: Medium-Dark Skin Tone 🤷🏿‍♀️ Woman Shrugging: Dark Skin Tone 💆 Person Getting Massage 💆🏻 Person Getting Massage: Light Skin Tone 💆🏼 Person Getting Massage: Medium-Light Skin Tone 💆🏽 Person Getting Massage: Medium Skin Tone 💆🏾 Person Getting Massage: Medium-Dark Skin Tone 💆🏿 Person Getting Massage: Dark Skin Tone 💆‍♂️ Man Getting Massage 💆🏻‍♂️ Man Getting Massage: Light Skin Tone 💆🏼‍♂️ Man Getting Massage: Medium-Light Skin Tone 💆🏽‍♂️ Man Getting Massage: Medium Skin Tone 💆🏾‍♂️ Man Getting Massage: Medium-Dark Skin Tone 💆🏿‍♂️ Man Getting Massage: Dark Skin Tone 💆‍♀️ Woman Getting Massage 💆🏻‍♀️ Woman Getting Massage: Light Skin Tone 💆🏼‍♀️ Woman Getting Massage: Medium-Light Skin Tone 💆🏽‍♀️ Woman Getting Massage: Medium Skin Tone 💆🏾‍♀️ Woman Getting Massage: Medium-Dark Skin Tone 💆🏿‍♀️ Woman Getting Massage: Dark Skin Tone 💇 Person Getting Haircut 💇🏻 Person Getting Haircut: Light Skin Tone 💇🏼 Person Getting Haircut: Medium-Light Skin Tone 💇🏽 Person Getting Haircut: Medium Skin Tone 💇🏾 Person Getting Haircut: Medium-Dark Skin Tone 💇🏿 Person Getting Haircut: Dark Skin Tone 💇‍♂️ Man Getting Haircut 💇🏻‍♂️ Man Getting Haircut: Light Skin Tone 💇🏼‍♂️ Man Getting Haircut: Medium-Light Skin Tone 💇🏽‍♂️ Man Getting Haircut: Medium Skin Tone 💇🏾‍♂️ Man Getting Haircut: Medium-Dark Skin Tone 💇🏿‍♂️ Man Getting Haircut: Dark Skin Tone 💇‍♀️ Woman Getting Haircut 💇🏻‍♀️ Woman Getting Haircut: Light Skin Tone 💇🏼‍♀️ Woman Getting Haircut: Medium-Light Skin Tone 💇🏽‍♀️ Woman Getting Haircut: Medium Skin Tone 💇🏾‍♀️ Woman Getting Haircut: Medium-Dark Skin Tone 💇🏿‍♀️ Woman Getting Haircut: Dark Skin Tone 🚶 Person Walking 🚶🏻 Person Walking: Light Skin Tone 🚶🏼 Person 
Walking: Medium-Light Skin Tone 🚶🏽 Person Walking: Medium Skin Tone 🚶🏾 Person Walking: Medium-Dark Skin Tone 🚶🏿 Person Walking: Dark Skin Tone 🚶‍♂️ Man Walking 🚶🏻‍♂️ Man Walking: Light Skin Tone 🚶🏼‍♂️ Man Walking: Medium-Light Skin Tone 🚶🏽‍♂️ Man Walking: Medium Skin Tone 🚶🏾‍♂️ Man Walking: Medium-Dark Skin Tone 🚶🏿‍♂️ Man Walking: Dark Skin Tone 🚶‍♀️ Woman Walking 🚶🏻‍♀️ Woman Walking: Light Skin Tone 🚶🏼‍♀️ Woman Walking: Medium-Light Skin Tone 🚶🏽‍♀️ Woman Walking: Medium Skin Tone 🚶🏾‍♀️ Woman Walking: Medium-Dark Skin Tone 🚶🏿‍♀️ Woman Walking: Dark Skin Tone 🏃 Person Running 🏃🏻 Person Running: Light Skin Tone 🏃🏼 Person Running: Medium-Light Skin Tone 🏃🏽 Person Running: Medium Skin Tone 🏃🏾 Person Running: Medium-Dark Skin Tone 🏃🏿 Person Running: Dark Skin Tone 🏃‍♂️ Man Running 🏃🏻‍♂️ Man Running: Light Skin Tone 🏃🏼‍♂️ Man Running: Medium-Light Skin Tone 🏃🏽‍♂️ Man Running: Medium Skin Tone 🏃🏾‍♂️ Man Running: Medium-Dark Skin Tone 🏃🏿‍♂️ Man Running: Dark Skin Tone 🏃‍♀️ Woman Running 🏃🏻‍♀️ Woman Running: Light Skin Tone 🏃🏼‍♀️ Woman Running: Medium-Light Skin Tone 🏃🏽‍♀️ Woman Running: Medium Skin Tone 🏃🏾‍♀️ Woman Running: Medium-Dark Skin Tone 🏃🏿‍♀️ Woman Running: Dark Skin Tone 💃 Woman Dancing 💃🏻 Woman Dancing: Light Skin Tone 💃🏼 Woman Dancing: Medium-Light Skin Tone 💃🏽 Woman Dancing: Medium Skin Tone 💃🏾 Woman Dancing: Medium-Dark Skin Tone 💃🏿 Woman Dancing: Dark Skin Tone 🕺 Man Dancing 🕺🏻 Man Dancing: Light Skin Tone 🕺🏼 Man Dancing: Medium-Light Skin Tone 🕺🏽 Man Dancing: Medium Skin Tone 🕺🏾 Man Dancing: Medium-Dark Skin Tone 🕺🏿 Man Dancing: Dark Skin Tone 👯 People With Bunny Ears Partying 👯‍♂️ Men With Bunny Ears Partying 👯‍♀️ Women With Bunny Ears Partying 🧖 Person in Steamy Room 🧖🏻 Person in Steamy Room: Light Skin Tone 🧖🏼 Person in Steamy Room: Medium-Light Skin Tone 🧖🏽 Person in Steamy Room: Medium Skin Tone 🧖🏾 Person in Steamy Room: Medium-Dark Skin Tone 🧖🏿 Person in Steamy Room: Dark Skin Tone 🧖‍♀️ Woman in Steamy Room 🧖🏻‍♀️ Woman in Steamy Room: Light Skin Tone 
🧖🏼‍♀️ Woman in Steamy Room: Medium-Light Skin Tone 🧖🏽‍♀️ Woman in Steamy Room: Medium Skin Tone 🧖🏾‍♀️ Woman in Steamy Room: Medium-Dark Skin Tone 🧖🏿‍♀️ Woman in Steamy Room: Dark Skin Tone 🧖‍♂️ Man in Steamy Room 🧖🏻‍♂️ Man in Steamy Room: Light Skin Tone 🧖🏼‍♂️ Man in Steamy Room: Medium-Light Skin Tone 🧖🏽‍♂️ Man in Steamy Room: Medium Skin Tone 🧖🏾‍♂️ Man in Steamy Room: Medium-Dark Skin Tone 🧖🏿‍♂️ Man in Steamy Room: Dark Skin Tone 🧗 Person Climbing 🧗🏻 Person Climbing: Light Skin Tone 🧗🏼 Person Climbing: Medium-Light Skin Tone 🧗🏽 Person Climbing: Medium Skin Tone 🧗🏾 Person Climbing: Medium-Dark Skin Tone 🧗🏿 Person Climbing: Dark Skin Tone 🧗‍♀️ Woman Climbing 🧗🏻‍♀️ Woman Climbing: Light Skin Tone 🧗🏼‍♀️ Woman Climbing: Medium-Light Skin Tone 🧗🏽‍♀️ Woman Climbing: Medium Skin Tone 🧗🏾‍♀️ Woman Climbing: Medium-Dark Skin Tone 🧗🏿‍♀️ Woman Climbing: Dark Skin Tone 🧗‍♂️ Man Climbing 🧗🏻‍♂️ Man Climbing: Light Skin Tone 🧗🏼‍♂️ Man Climbing: Medium-Light Skin Tone 🧗🏽‍♂️ Man Climbing: Medium Skin Tone 🧗🏾‍♂️ Man Climbing: Medium-Dark Skin Tone 🧗🏿‍♂️ Man Climbing: Dark Skin Tone 🧘 Person in Lotus Position 🧘🏻 Person in Lotus Position: Light Skin Tone 🧘🏼 Person in Lotus Position: Medium-Light Skin Tone 🧘🏽 Person in Lotus Position: Medium Skin Tone 🧘🏾 Person in Lotus Position: Medium-Dark Skin Tone 🧘🏿 Person in Lotus Position: Dark Skin Tone 🧘‍♀️ Woman in Lotus Position 🧘🏻‍♀️ Woman in Lotus Position: Light Skin Tone 🧘🏼‍♀️ Woman in Lotus Position: Medium-Light Skin Tone 🧘🏽‍♀️ Woman in Lotus Position: Medium Skin Tone 🧘🏾‍♀️ Woman in Lotus Position: Medium-Dark Skin Tone 🧘🏿‍♀️ Woman in Lotus Position: Dark Skin Tone 🧘‍♂️ Man in Lotus Position 🧘🏻‍♂️ Man in Lotus Position: Light Skin Tone 🧘🏼‍♂️ Man in Lotus Position: Medium-Light Skin Tone 🧘🏽‍♂️ Man in Lotus Position: Medium Skin Tone 🧘🏾‍♂️ Man in Lotus Position: Medium-Dark Skin Tone 🧘🏿‍♂️ Man in Lotus Position: Dark Skin Tone 🛀 Person Taking Bath 🛀🏻 Person Taking Bath: Light Skin Tone 🛀🏼 Person Taking Bath: Medium-Light Skin Tone 🛀🏽 
Person Taking Bath: Medium Skin Tone 🛀🏾 Person Taking Bath: Medium-Dark Skin Tone 🛀🏿 Person Taking Bath: Dark Skin Tone 🛌 Person in Bed 🛌🏻 Person in Bed: Light Skin Tone 🛌🏼 Person in Bed: Medium-Light Skin Tone 🛌🏽 Person in Bed: Medium Skin Tone 🛌🏾 Person in Bed: Medium-Dark Skin Tone 🛌🏿 Person in Bed: Dark Skin Tone 🕴 Man in Business Suit Levitating 🕴🏻 Man in Business Suit Levitating: Light Skin Tone 🕴🏼 Man in Business Suit Levitating: Medium-Light Skin Tone 🕴🏽 Man in Business Suit Levitating: Medium Skin Tone 🕴🏾 Man in Business Suit Levitating: Medium-Dark Skin Tone 🕴🏿 Man in Business Suit Levitating: Dark Skin Tone 🗣 Speaking Head 👤 Bust in Silhouette 👥 Busts in Silhouette 🤺 Person Fencing 🏇 Horse Racing 🏇🏻 Horse Racing: Light Skin Tone 🏇🏼 Horse Racing: Medium-Light Skin Tone 🏇🏽 Horse Racing: Medium Skin Tone 🏇🏾 Horse Racing: Medium-Dark Skin Tone 🏇🏿 Horse Racing: Dark Skin Tone ⛷ Skier 🏂 Snowboarder 🏂🏻 Snowboarder: Light Skin Tone 🏂🏼 Snowboarder: Medium-Light Skin Tone 🏂🏽 Snowboarder: Medium Skin Tone 🏂🏾 Snowboarder: Medium-Dark Skin Tone 🏂🏿 Snowboarder: Dark Skin Tone 🏌 Person Golfing 🏌🏻 Person Golfing: Light Skin Tone 🏌🏼 Person Golfing: Medium-Light Skin Tone 🏌🏽 Person Golfing: Medium Skin Tone 🏌🏾 Person Golfing: Medium-Dark Skin Tone 🏌🏿 Person Golfing: Dark Skin Tone 🏌️‍♂️ Man Golfing 🏌🏻‍♂️ Man Golfing: Light Skin Tone 🏌🏼‍♂️ Man Golfing: Medium-Light Skin Tone 🏌🏽‍♂️ Man Golfing: Medium Skin Tone 🏌🏾‍♂️ Man Golfing: Medium-Dark Skin Tone 🏌🏿‍♂️ Man Golfing: Dark Skin Tone 🏌️‍♀️ Woman Golfing 🏌🏻‍♀️ Woman Golfing: Light Skin Tone 🏌🏼‍♀️ Woman Golfing: Medium-Light Skin Tone 🏌🏽‍♀️ Woman Golfing: Medium Skin Tone 🏌🏾‍♀️ Woman Golfing: Medium-Dark Skin Tone 🏌🏿‍♀️ Woman Golfing: Dark Skin Tone 🏄 Person Surfing 🏄🏻 Person Surfing: Light Skin Tone 🏄🏼 Person Surfing: Medium-Light Skin Tone 🏄🏽 Person Surfing: Medium Skin Tone 🏄🏾 Person Surfing: Medium-Dark Skin Tone 🏄🏿 Person Surfing: Dark Skin Tone 🏄‍♂️ Man Surfing 🏄🏻‍♂️ Man Surfing: Light Skin Tone 🏄🏼‍♂️ Man Surfing: 
Medium-Light Skin Tone 🏄🏽‍♂️ Man Surfing: Medium Skin Tone 🏄🏾‍♂️ Man Surfing: Medium-Dark Skin Tone 🏄🏿‍♂️ Man Surfing: Dark Skin Tone 🏄‍♀️ Woman Surfing 🏄🏻‍♀️ Woman Surfing: Light Skin Tone 🏄🏼‍♀️ Woman Surfing: Medium-Light Skin Tone 🏄🏽‍♀️ Woman Surfing: Medium Skin Tone 🏄🏾‍♀️ Woman Surfing: Medium-Dark Skin Tone 🏄🏿‍♀️ Woman Surfing: Dark Skin Tone 🚣 Person Rowing Boat 🚣🏻 Person Rowing Boat: Light Skin Tone 🚣🏼 Person Rowing Boat: Medium-Light Skin Tone 🚣🏽 Person Rowing Boat: Medium Skin Tone 🚣🏾 Person Rowing Boat: Medium-Dark Skin Tone 🚣🏿 Person Rowing Boat: Dark Skin Tone 🚣‍♂️ Man Rowing Boat 🚣🏻‍♂️ Man Rowing Boat: Light Skin Tone 🚣🏼‍♂️ Man Rowing Boat: Medium-Light Skin Tone 🚣🏽‍♂️ Man Rowing Boat: Medium Skin Tone 🚣🏾‍♂️ Man Rowing Boat: Medium-Dark Skin Tone 🚣🏿‍♂️ Man Rowing Boat: Dark Skin Tone 🚣‍♀️ Woman Rowing Boat 🚣🏻‍♀️ Woman Rowing Boat: Light Skin Tone 🚣🏼‍♀️ Woman Rowing Boat: Medium-Light Skin Tone 🚣🏽‍♀️ Woman Rowing Boat: Medium Skin Tone 🚣🏾‍♀️ Woman Rowing Boat: Medium-Dark Skin Tone 🚣🏿‍♀️ Woman Rowing Boat: Dark Skin Tone 🏊 Person Swimming 🏊🏻 Person Swimming: Light Skin Tone 🏊🏼 Person Swimming: Medium-Light Skin Tone 🏊🏽 Person Swimming: Medium Skin Tone 🏊🏾 Person Swimming: Medium-Dark Skin Tone 🏊🏿 Person Swimming: Dark Skin Tone 🏊‍♂️ Man Swimming 🏊🏻‍♂️ Man Swimming: Light Skin Tone 🏊🏼‍♂️ Man Swimming: Medium-Light Skin Tone 🏊🏽‍♂️ Man Swimming: Medium Skin Tone 🏊🏾‍♂️ Man Swimming: Medium-Dark Skin Tone 🏊🏿‍♂️ Man Swimming: Dark Skin Tone 🏊‍♀️ Woman Swimming 🏊🏻‍♀️ Woman Swimming: Light Skin Tone 🏊🏼‍♀️ Woman Swimming: Medium-Light Skin Tone 🏊🏽‍♀️ Woman Swimming: Medium Skin Tone 🏊🏾‍♀️ Woman Swimming: Medium-Dark Skin Tone 🏊🏿‍♀️ Woman Swimming: Dark Skin Tone ⛹ Person Bouncing Ball ⛹🏻 Person Bouncing Ball: Light Skin Tone ⛹🏼 Person Bouncing Ball: Medium-Light Skin Tone ⛹🏽 Person Bouncing Ball: Medium Skin Tone ⛹🏾 Person Bouncing Ball: Medium-Dark Skin Tone ⛹🏿 Person Bouncing Ball: Dark Skin Tone ⛹️‍♂️ Man Bouncing Ball ⛹🏻‍♂️ Man Bouncing Ball: Light Skin 
Tone ⛹🏼‍♂️ Man Bouncing Ball: Medium-Light Skin Tone ⛹🏽‍♂️ Man Bouncing Ball: Medium Skin Tone ⛹🏾‍♂️ Man Bouncing Ball: Medium-Dark Skin Tone ⛹🏿‍♂️ Man Bouncing Ball: Dark Skin Tone ⛹️‍♀️ Woman Bouncing Ball ⛹🏻‍♀️ Woman Bouncing Ball: Light Skin Tone ⛹🏼‍♀️ Woman Bouncing Ball: Medium-Light Skin Tone ⛹🏽‍♀️ Woman Bouncing Ball: Medium Skin Tone ⛹🏾‍♀️ Woman Bouncing Ball: Medium-Dark Skin Tone ⛹🏿‍♀️ Woman Bouncing Ball: Dark Skin Tone 🏋 Person Lifting Weights 🏋🏻 Person Lifting Weights: Light Skin Tone 🏋🏼 Person Lifting Weights: Medium-Light Skin Tone 🏋🏽 Person Lifting Weights: Medium Skin Tone 🏋🏾 Person Lifting Weights: Medium-Dark Skin Tone 🏋🏿 Person Lifting Weights: Dark Skin Tone 🏋️‍♂️ Man Lifting Weights 🏋🏻‍♂️ Man Lifting Weights: Light Skin Tone 🏋🏼‍♂️ Man Lifting Weights: Medium-Light Skin Tone 🏋🏽‍♂️ Man Lifting Weights: Medium Skin Tone 🏋🏾‍♂️ Man Lifting Weights: Medium-Dark Skin Tone 🏋🏿‍♂️ Man Lifting Weights: Dark Skin Tone 🏋️‍♀️ Woman Lifting Weights 🏋🏻‍♀️ Woman Lifting Weights: Light Skin Tone 🏋🏼‍♀️ Woman Lifting Weights: Medium-Light Skin Tone 🏋🏽‍♀️ Woman Lifting Weights: Medium Skin Tone 🏋🏾‍♀️ Woman Lifting Weights: Medium-Dark Skin Tone 🏋🏿‍♀️ Woman Lifting Weights: Dark Skin Tone 🚴 Person Biking 🚴🏻 Person Biking: Light Skin Tone 🚴🏼 Person Biking: Medium-Light Skin Tone 🚴🏽 Person Biking: Medium Skin Tone 🚴🏾 Person Biking: Medium-Dark Skin Tone 🚴🏿 Person Biking: Dark Skin Tone 🚴‍♂️ Man Biking 🚴🏻‍♂️ Man Biking: Light Skin Tone 🚴🏼‍♂️ Man Biking: Medium-Light Skin Tone 🚴🏽‍♂️ Man Biking: Medium Skin Tone 🚴🏾‍♂️ Man Biking: Medium-Dark Skin Tone 🚴🏿‍♂️ Man Biking: Dark Skin Tone 🚴‍♀️ Woman Biking 🚴🏻‍♀️ Woman Biking: Light Skin Tone 🚴🏼‍♀️ Woman Biking: Medium-Light Skin Tone 🚴🏽‍♀️ Woman Biking: Medium Skin Tone 🚴🏾‍♀️ Woman Biking: Medium-Dark Skin Tone 🚴🏿‍♀️ Woman Biking: Dark Skin Tone 🚵 Person Mountain Biking 🚵🏻 Person Mountain Biking: Light Skin Tone 🚵🏼 Person Mountain Biking: Medium-Light Skin Tone 🚵🏽 Person Mountain Biking: Medium Skin Tone 🚵🏾 Person Mountain 
Biking: Medium-Dark Skin Tone 🚵🏿 Person Mountain Biking: Dark Skin Tone 🚵‍♂️ Man Mountain Biking 🚵🏻‍♂️ Man Mountain Biking: Light Skin Tone 🚵🏼‍♂️ Man Mountain Biking: Medium-Light Skin Tone 🚵🏽‍♂️ Man Mountain Biking: Medium Skin Tone 🚵🏾‍♂️ Man Mountain Biking: Medium-Dark Skin Tone 🚵🏿‍♂️ Man Mountain Biking: Dark Skin Tone 🚵‍♀️ Woman Mountain Biking 🚵🏻‍♀️ Woman Mountain Biking: Light Skin Tone 🚵🏼‍♀️ Woman Mountain Biking: Medium-Light Skin Tone 🚵🏽‍♀️ Woman Mountain Biking: Medium Skin Tone 🚵🏾‍♀️ Woman Mountain Biking: Medium-Dark Skin Tone 🚵🏿‍♀️ Woman Mountain Biking: Dark Skin Tone 🏎 Racing Car 🏍 Motorcycle 🤸 Person Cartwheeling 🤸🏻 Person Cartwheeling: Light Skin Tone 🤸🏼 Person Cartwheeling: Medium-Light Skin Tone 🤸🏽 Person Cartwheeling: Medium Skin Tone 🤸🏾 Person Cartwheeling: Medium-Dark Skin Tone 🤸🏿 Person Cartwheeling: Dark Skin Tone 🤸‍♂️ Man Cartwheeling 🤸🏻‍♂️ Man Cartwheeling: Light Skin Tone 🤸🏼‍♂️ Man Cartwheeling: Medium-Light Skin Tone 🤸🏽‍♂️ Man Cartwheeling: Medium Skin Tone 🤸🏾‍♂️ Man Cartwheeling: Medium-Dark Skin Tone 🤸🏿‍♂️ Man Cartwheeling: Dark Skin Tone 🤸‍♀️ Woman Cartwheeling 🤸🏻‍♀️ Woman Cartwheeling: Light Skin Tone 🤸🏼‍♀️ Woman Cartwheeling: Medium-Light Skin Tone 🤸🏽‍♀️ Woman Cartwheeling: Medium Skin Tone 🤸🏾‍♀️ Woman Cartwheeling: Medium-Dark Skin Tone 🤸🏿‍♀️ Woman Cartwheeling: Dark Skin Tone 🤼 People Wrestling 🤼‍♂️ Men Wrestling 🤼‍♀️ Women Wrestling 🤽 Person Playing Water Polo 🤽🏻 Person Playing Water Polo: Light Skin Tone 🤽🏼 Person Playing Water Polo: Medium-Light Skin Tone 🤽🏽 Person Playing Water Polo: Medium Skin Tone 🤽🏾 Person Playing Water Polo: Medium-Dark Skin Tone 🤽🏿 Person Playing Water Polo: Dark Skin Tone 🤽‍♂️ Man Playing Water Polo 🤽🏻‍♂️ Man Playing Water Polo: Light Skin Tone 🤽🏼‍♂️ Man Playing Water Polo: Medium-Light Skin Tone 🤽🏽‍♂️ Man Playing Water Polo: Medium Skin Tone 🤽🏾‍♂️ Man Playing Water Polo: Medium-Dark Skin Tone 🤽🏿‍♂️ Man Playing Water Polo: Dark Skin Tone 🤽‍♀️ Woman Playing Water Polo 🤽🏻‍♀️ Woman Playing Water Polo: 
Light Skin Tone 🤽🏼‍♀️ Woman Playing Water Polo: Medium-Light Skin Tone 🤽🏽‍♀️ Woman Playing Water Polo: Medium Skin Tone 🤽🏾‍♀️ Woman Playing Water Polo: Medium-Dark Skin Tone 🤽🏿‍♀️ Woman Playing Water Polo: Dark Skin Tone 🤾 Person Playing Handball 🤾🏻 Person Playing Handball: Light Skin Tone 🤾🏼 Person Playing Handball: Medium-Light Skin Tone 🤾🏽 Person Playing Handball: Medium Skin Tone 🤾🏾 Person Playing Handball: Medium-Dark Skin Tone 🤾🏿 Person Playing Handball: Dark Skin Tone 🤾‍♂️ Man Playing Handball 🤾🏻‍♂️ Man Playing Handball: Light Skin Tone 🤾🏼‍♂️ Man Playing Handball: Medium-Light Skin Tone 🤾🏽‍♂️ Man Playing Handball: Medium Skin Tone 🤾🏾‍♂️ Man Playing Handball: Medium-Dark Skin Tone 🤾🏿‍♂️ Man Playing Handball: Dark Skin Tone 🤾‍♀️ Woman Playing Handball 🤾🏻‍♀️ Woman Playing Handball: Light Skin Tone 🤾🏼‍♀️ Woman Playing Handball: Medium-Light Skin Tone 🤾🏽‍♀️ Woman Playing Handball: Medium Skin Tone 🤾🏾‍♀️ Woman Playing Handball: Medium-Dark Skin Tone 🤾🏿‍♀️ Woman Playing Handball: Dark Skin Tone 🤹 Person Juggling 🤹🏻 Person Juggling: Light Skin Tone 🤹🏼 Person Juggling: Medium-Light Skin Tone 🤹🏽 Person Juggling: Medium Skin Tone 🤹🏾 Person Juggling: Medium-Dark Skin Tone 🤹🏿 Person Juggling: Dark Skin Tone 🤹‍♂️ Man Juggling 🤹🏻‍♂️ Man Juggling: Light Skin Tone 🤹🏼‍♂️ Man Juggling: Medium-Light Skin Tone 🤹🏽‍♂️ Man Juggling: Medium Skin Tone 🤹🏾‍♂️ Man Juggling: Medium-Dark Skin Tone 🤹🏿‍♂️ Man Juggling: Dark Skin Tone 🤹‍♀️ Woman Juggling 🤹🏻‍♀️ Woman Juggling: Light Skin Tone 🤹🏼‍♀️ Woman Juggling: Medium-Light Skin Tone 🤹🏽‍♀️ Woman Juggling: Medium Skin Tone 🤹🏾‍♀️ Woman Juggling: Medium-Dark Skin Tone 🤹🏿‍♀️ Woman Juggling: Dark Skin Tone 🤼🏻 Wrestlers, Type-1-2 🤼🏼 Wrestlers, Type-3 👫 Man and Woman Holding Hands 🤼🏽 Wrestlers, Type-4 👬 Two Men Holding Hands 🤼🏾 Wrestlers, Type-5 👭 Two Women Holding Hands 🤼🏿 Wrestlers, Type-6 💏 Kiss 👩‍❤️‍💋‍👨 Kiss: Woman, Man 🤼🏻‍♂️ Men Wrestling, Type-1-2 🤼🏼‍♂️ Men Wrestling, Type-3 🤼🏽‍♂️ Men Wrestling, Type-4 👨‍❤️‍💋‍👨 Kiss: Man, Man 🤼🏾‍♂️ Men 
Wrestling, Type-5 🤼🏿‍♂️ Men Wrestling, Type-6 👩‍❤️‍💋‍👩 Kiss: Woman, Woman 🤼🏻‍♀️ Women Wrestling, Type-1-2 💑 Couple With Heart 🤼🏼‍♀️ Women Wrestling, Type-3 👩‍❤️‍👨 Couple With Heart: Woman, Man 🤼🏽‍♀️ Women Wrestling, Type-4 🤼🏾‍♀️ Women Wrestling, Type-5 👨‍❤️‍👨 Couple With Heart: Man, Man 🤼🏿‍♀️ Women Wrestling, Type-6 👩‍❤️‍👩 Couple With Heart: Woman, Woman 👪 Family 👨‍👩‍👦 Family: Man, Woman, Boy 👨‍👩‍👧 Family: Man, Woman, Girl 👨‍👩‍👧‍👦 Family: Man, Woman, Girl, Boy 👨‍👩‍👦‍👦 Family: Man, Woman, Boy, Boy 👨‍👩‍👧‍👧 Family: Man, Woman, Girl, Girl 👨‍👨‍👦 Family: Man, Man, Boy 👨‍👨‍👧 Family: Man, Man, Girl 👨‍👨‍👧‍👦 Family: Man, Man, Girl, Boy 👨‍👨‍👦‍👦 Family: Man, Man, Boy, Boy 👨‍👨‍👧‍👧 Family: Man, Man, Girl, Girl 👩‍👩‍👦 Family: Woman, Woman, Boy 👩‍👩‍👧 Family: Woman, Woman, Girl 👩‍👩‍👧‍👦 Family: Woman, Woman, Girl, Boy 👩‍👩‍👦‍👦 Family: Woman, Woman, Boy, Boy 👩‍👩‍👧‍👧 Family: Woman, Woman, Girl, Girl 👨‍👦 Family: Man, Boy 👨‍👦‍👦 Family: Man, Boy, Boy 👨‍👧 Family: Man, Girl 👨‍👧‍👦 Family: Man, Girl, Boy 👨‍👧‍👧 Family: Man, Girl, Girl 👩‍👦 Family: Woman, Boy 👩‍👦‍👦 Family: Woman, Boy, Boy 👩‍👧 Family: Woman, Girl 👩‍👧‍👦 Family: Woman, Girl, Boy 👩‍👧‍👧 Family: Woman, Girl, Girl 🤳 Selfie 🤳🏻 Selfie: Light Skin Tone 🤳🏼 Selfie: Medium-Light Skin Tone 🤳🏽 Selfie: Medium Skin Tone 🤳🏾 Selfie: Medium-Dark Skin Tone 🤳🏿 Selfie: Dark Skin Tone 💪 Flexed Biceps 💪🏻 Flexed Biceps: Light Skin Tone 💪🏼 Flexed Biceps: Medium-Light Skin Tone 💪🏽 Flexed Biceps: Medium Skin Tone 💪🏾 Flexed Biceps: Medium-Dark Skin Tone 💪🏿 Flexed Biceps: Dark Skin Tone 👈 Backhand Index Pointing Left 👈🏻 Backhand Index Pointing Left: Light Skin Tone 👈🏼 Backhand Index Pointing Left: Medium-Light Skin Tone 👈🏽 Backhand Index Pointing Left: Medium Skin Tone 👈🏾 Backhand Index Pointing Left: Medium-Dark Skin Tone 👈🏿 Backhand Index Pointing Left: Dark Skin Tone 👉 Backhand Index Pointing Right 👉🏻 Backhand Index Pointing Right: Light Skin Tone 👉🏼 Backhand Index Pointing Right: Medium-Light Skin Tone 👉🏽 Backhand Index Pointing Right: Medium Skin Tone 👉🏾 
Backhand Index Pointing Right: Medium-Dark Skin Tone 👉🏿 Backhand Index Pointing Right: Dark Skin Tone ☝ Index Pointing Up ☝🏻 Index Pointing Up: Light Skin Tone ☝🏼 Index Pointing Up: Medium-Light Skin Tone ☝🏽 Index Pointing Up: Medium Skin Tone ☝🏾 Index Pointing Up: Medium-Dark Skin Tone ☝🏿 Index Pointing Up: Dark Skin Tone 👆 Backhand Index Pointing Up 👆🏻 Backhand Index Pointing Up: Light Skin Tone 👆🏼 Backhand Index Pointing Up: Medium-Light Skin Tone 👆🏽 Backhand Index Pointing Up: Medium Skin Tone 👆🏾 Backhand Index Pointing Up: Medium-Dark Skin Tone 👆🏿 Backhand Index Pointing Up: Dark Skin Tone 🖕 Middle Finger 🖕🏻 Middle Finger: Light Skin Tone 🖕🏼 Middle Finger: Medium-Light Skin Tone 🖕🏽 Middle Finger: Medium Skin Tone 🖕🏾 Middle Finger: Medium-Dark Skin Tone 🖕🏿 Middle Finger: Dark Skin Tone 👇 Backhand Index Pointing Down 👇🏻 Backhand Index Pointing Down: Light Skin Tone 👇🏼 Backhand Index Pointing Down: Medium-Light Skin Tone 👇🏽 Backhand Index Pointing Down: Medium Skin Tone 👇🏾 Backhand Index Pointing Down: Medium-Dark Skin Tone 👇🏿 Backhand Index Pointing Down: Dark Skin Tone ✌ Victory Hand ✌🏻 Victory Hand: Light Skin Tone ✌🏼 Victory Hand: Medium-Light Skin Tone ✌🏽 Victory Hand: Medium Skin Tone ✌🏾 Victory Hand: Medium-Dark Skin Tone ✌🏿 Victory Hand: Dark Skin Tone 🤞 Crossed Fingers 🤞🏻 Crossed Fingers: Light Skin Tone 🤞🏼 Crossed Fingers: Medium-Light Skin Tone 🤞🏽 Crossed Fingers: Medium Skin Tone 🤞🏾 Crossed Fingers: Medium-Dark Skin Tone 🤞🏿 Crossed Fingers: Dark Skin Tone 🖖 Vulcan Salute 🖖🏻 Vulcan Salute: Light Skin Tone 🖖🏼 Vulcan Salute: Medium-Light Skin Tone 🖖🏽 Vulcan Salute: Medium Skin Tone 🖖🏾 Vulcan Salute: Medium-Dark Skin Tone 🖖🏿 Vulcan Salute: Dark Skin Tone 🤘 Sign of the Horns 🤘🏻 Sign of the Horns: Light Skin Tone 🤘🏼 Sign of the Horns: Medium-Light Skin Tone 🤘🏽 Sign of the Horns: Medium Skin Tone 🤘🏾 Sign of the Horns: Medium-Dark Skin Tone 🤘🏿 Sign of the Horns: Dark Skin Tone 🤙 Call Me Hand 🤙🏻 Call Me Hand: Light Skin Tone 🤙🏼 Call Me Hand: Medium-Light Skin 
Tone 🤙🏽 Call Me Hand: Medium Skin Tone 🤙🏾 Call Me Hand: Medium-Dark Skin Tone 🤙🏿 Call Me Hand: Dark Skin Tone 🖐 Raised Hand With Fingers Splayed 🖐🏻 Raised Hand With Fingers Splayed: Light Skin Tone 🖐🏼 Raised Hand With Fingers Splayed: Medium-Light Skin Tone 🖐🏽 Raised Hand With Fingers Splayed: Medium Skin Tone 🖐🏾 Raised Hand With Fingers Splayed: Medium-Dark Skin Tone 🖐🏿 Raised Hand With Fingers Splayed: Dark Skin Tone ✋ Raised Hand ✋🏻 Raised Hand: Light Skin Tone ✋🏼 Raised Hand: Medium-Light Skin Tone ✋🏽 Raised Hand: Medium Skin Tone ✋🏾 Raised Hand: Medium-Dark Skin Tone ✋🏿 Raised Hand: Dark Skin Tone 👌 OK Hand 👌🏻 OK Hand: Light Skin Tone 👌🏼 OK Hand: Medium-Light Skin Tone 👌🏽 OK Hand: Medium Skin Tone 👌🏾 OK Hand: Medium-Dark Skin Tone 👌🏿 OK Hand: Dark Skin Tone 👍 Thumbs Up 👍🏻 Thumbs Up: Light Skin Tone 👍🏼 Thumbs Up: Medium-Light Skin Tone 👍🏽 Thumbs Up: Medium Skin Tone 👍🏾 Thumbs Up: Medium-Dark Skin Tone 👍🏿 Thumbs Up: Dark Skin Tone 👎 Thumbs Down 👎🏻 Thumbs Down: Light Skin Tone 👎🏼 Thumbs Down: Medium-Light Skin Tone 👎🏽 Thumbs Down: Medium Skin Tone 👎🏾 Thumbs Down: Medium-Dark Skin Tone 👎🏿 Thumbs Down: Dark Skin Tone ✊ Raised Fist ✊🏻 Raised Fist: Light Skin Tone ✊🏼 Raised Fist: Medium-Light Skin Tone ✊🏽 Raised Fist: Medium Skin Tone ✊🏾 Raised Fist: Medium-Dark Skin Tone ✊🏿 Raised Fist: Dark Skin Tone 👊 Oncoming Fist 👊🏻 Oncoming Fist: Light Skin Tone 👊🏼 Oncoming Fist: Medium-Light Skin Tone 👊🏽 Oncoming Fist: Medium Skin Tone 👊🏾 Oncoming Fist: Medium-Dark Skin Tone 👊🏿 Oncoming Fist: Dark Skin Tone 🤛 Left-Facing Fist 🤛🏻 Left-Facing Fist: Light Skin Tone 🤛🏼 Left-Facing Fist: Medium-Light Skin Tone 🤛🏽 Left-Facing Fist: Medium Skin Tone 🤛🏾 Left-Facing Fist: Medium-Dark Skin Tone 🤛🏿 Left-Facing Fist: Dark Skin Tone 🤜 Right-Facing Fist 🤜🏻 Right-Facing Fist: Light Skin Tone 🤜🏼 Right-Facing Fist: Medium-Light Skin Tone 🤜🏽 Right-Facing Fist: Medium Skin Tone 🤜🏾 Right-Facing Fist: Medium-Dark Skin Tone 🤜🏿 Right-Facing Fist: Dark Skin Tone 🤚 Raised Back of Hand 🤚🏻 Raised Back 
of Hand: Light Skin Tone 🤚🏼 Raised Back of Hand: Medium-Light Skin Tone 🤚🏽 Raised Back of Hand: Medium Skin Tone 🤚🏾 Raised Back of Hand: Medium-Dark Skin Tone 🤚🏿 Raised Back of Hand: Dark Skin Tone 👋 Waving Hand 👋🏻 Waving Hand: Light Skin Tone 👋🏼 Waving Hand: Medium-Light Skin Tone 👋🏽 Waving Hand: Medium Skin Tone 👋🏾 Waving Hand: Medium-Dark Skin Tone 👋🏿 Waving Hand: Dark Skin Tone 🤟 Love-You Gesture 🤟🏻 Love-You Gesture: Light Skin Tone 🤟🏼 Love-You Gesture: Medium-Light Skin Tone 🤟🏽 Love-You Gesture: Medium Skin Tone 🤟🏾 Love-You Gesture: Medium-Dark Skin Tone 🤟🏿 Love-You Gesture: Dark Skin Tone ✍ Writing Hand ✍🏻 Writing Hand: Light Skin Tone ✍🏼 Writing Hand: Medium-Light Skin Tone ✍🏽 Writing Hand: Medium Skin Tone ✍🏾 Writing Hand: Medium-Dark Skin Tone ✍🏿 Writing Hand: Dark Skin Tone 👏 Clapping Hands 👏🏻 Clapping Hands: Light Skin Tone 👏🏼 Clapping Hands: Medium-Light Skin Tone 👏🏽 Clapping Hands: Medium Skin Tone 👏🏾 Clapping Hands: Medium-Dark Skin Tone 👏🏿 Clapping Hands: Dark Skin Tone 👐 Open Hands 👐🏻 Open Hands: Light Skin Tone 👐🏼 Open Hands: Medium-Light Skin Tone 👐🏽 Open Hands: Medium Skin Tone 👐🏾 Open Hands: Medium-Dark Skin Tone 👐🏿 Open Hands: Dark Skin Tone 🙌 Raising Hands 🙌🏻 Raising Hands: Light Skin Tone 🙌🏼 Raising Hands: Medium-Light Skin Tone 🙌🏽 Raising Hands: Medium Skin Tone 🙌🏾 Raising Hands: Medium-Dark Skin Tone 🙌🏿 Raising Hands: Dark Skin Tone 🤲 Palms Up Together 🤲🏻 Palms Up Together: Light Skin Tone 🤲🏼 Palms Up Together: Medium-Light Skin Tone 🤲🏽 Palms Up Together: Medium Skin Tone 🤲🏾 Palms Up Together: Medium-Dark Skin Tone 🤲🏿 Palms Up Together: Dark Skin Tone 🙏 Folded Hands 🙏🏻 Folded Hands: Light Skin Tone 🙏🏼 Folded Hands: Medium-Light Skin Tone 🙏🏽 Folded Hands: Medium Skin Tone 🙏🏾 Folded Hands: Medium-Dark Skin Tone 🙏🏿 Folded Hands: Dark Skin Tone 🤝 Handshake 💅 Nail Polish 💅🏻 Nail Polish: Light Skin Tone 💅🏼 Nail Polish: Medium-Light Skin Tone 💅🏽 Nail Polish: Medium Skin Tone 💅🏾 Nail Polish: Medium-Dark Skin Tone 💅🏿 Nail Polish: Dark Skin Tone 👂 
Ear 👂🏻 Ear: Light Skin Tone 👂🏼 Ear: Medium-Light Skin Tone 👂🏽 Ear: Medium Skin Tone 👂🏾 Ear: Medium-Dark Skin Tone 👂🏿 Ear: Dark Skin Tone 👃 Nose 👃🏻 Nose: Light Skin Tone 👃🏼 Nose: Medium-Light Skin Tone 👃🏽 Nose: Medium Skin Tone 👃🏾 Nose: Medium-Dark Skin Tone 👃🏿 Nose: Dark Skin Tone 👣 Footprints 👀 Eyes 👁 Eye 👁️‍🗨️ Eye in Speech Bubble 🧠 Brain 👅 Tongue 👄 Mouth 💋 Kiss Mark 💘 Heart With Arrow ❤ Red Heart 💓 Beating Heart 💔 Broken Heart 💕 Two Hearts 💖 Sparkling Heart 💗 Growing Heart 💙 Blue Heart 💚 Green Heart 💛 Yellow Heart 🧡 Orange Heart 💜 Purple Heart 🖤 Black Heart 💝 Heart With Ribbon 💞 Revolving Hearts 💟 Heart Decoration ❣ Heavy Heart Exclamation 💌 Love Letter 💤 Zzz 💢 Anger Symbol 💣 Bomb 💥 Collision 💦 Sweat Droplets 💨 Dashing Away 💫 Dizzy 💬 Speech Balloon 🗨 Left Speech Bubble 🗯 Right Anger Bubble 💭 Thought Balloon 🕳 Hole 👓 Glasses 🕶 Sunglasses 👔 Necktie 👕 T-Shirt 👖 Jeans 🧣 Scarf 🧤 Gloves 🧥 Coat 🧦 Socks 👗 Dress 👘 Kimono 👙 Bikini 👚 Woman’s Clothes 👛 Purse 👜 Handbag 👝 Clutch Bag 🛍 Shopping Bags 🎒 School Backpack 👞 Man’s Shoe 👟 Running Shoe 👠 High-Heeled Shoe 👡 Woman’s Sandal 👢 Woman’s Boot 👑 Crown 👒 Woman’s Hat 🎩 Top Hat 🎓 Graduation Cap 🧢 Billed Cap ⛑ Rescue Worker’s Helmet 📿 Prayer Beads 💄 Lipstick 💍 Ring 💎 Gem Stone 🐵 Monkey Face 🐒 Monkey 🦍 Gorilla 🐶 Dog Face 🐕 Dog 🐩 Poodle 🐺 Wolf Face 🦊 Fox Face 🐱 Cat Face 🐈 Cat 🦁 Lion Face 🐯 Tiger Face 🐅 Tiger 🐆 Leopard 🐴 Horse Face 🐎 Horse 🦄 Unicorn Face 🦓 Zebra 🦌 Deer 🐮 Cow Face 🐂 Ox 🐃 Water Buffalo 🐄 Cow 🐷 Pig Face 🐖 Pig 🐗 Boar 🐽 Pig Nose 🐏 Ram 🐑 Ewe 🐐 Goat 🐪 Camel 🐫 Two-Hump Camel 🦒 Giraffe 🐘 Elephant 🦏 Rhinoceros 🐭 Mouse Face 🐁 Mouse 🐀 Rat 🐹 Hamster Face 🐰 Rabbit Face 🐇 Rabbit 🐿 Chipmunk 🦔 Hedgehog 🦇 Bat 🐻 Bear Face 🐨 Koala 🐼 Panda Face 🐾 Paw Prints 🦃 Turkey 🐔 Chicken 🐓 Rooster 🐣 Hatching Chick 🐤 Baby Chick 🐥 Front-Facing Baby Chick 🐦 Bird 🐧 Penguin 🕊 Dove 🦅 Eagle 🦆 Duck 🦉 Owl 🐸 Frog Face 🐊 Crocodile 🐢 Turtle 🦎 Lizard 🐍 Snake 🐲 Dragon Face 🐉 Dragon 🦕 Sauropod 🦖 T-Rex 🐳 Spouting Whale 🐋 Whale 🐬 Dolphin 🐟 Fish 🐠 Tropical Fish 🐡 
Blowfish 🦈 Shark 🐙 Octopus 🐚 Spiral Shell 🦀 Crab 🦐 Shrimp 🦑 Squid 🐌 Snail 🦋 Butterfly 🐛 Bug 🐜 Ant 🐝 Honeybee 🐞 Lady Beetle 🦗 Cricket 🕷 Spider 🕸 Spider Web 🦂 Scorpion 💐 Bouquet 🌸 Cherry Blossom 💮 White Flower 🏵 Rosette 🌹 Rose 🥀 Wilted Flower 🌺 Hibiscus 🌻 Sunflower 🌼 Blossom 🌷 Tulip 🌱 Seedling 🌲 Evergreen Tree 🌳 Deciduous Tree 🌴 Palm Tree 🌵 Cactus 🌾 Sheaf of Rice 🌿 Herb ☘ Shamrock 🍀 Four Leaf Clover 🍁 Maple Leaf 🍂 Fallen Leaf 🍃 Leaf Fluttering in Wind 🍇 Grapes 🍈 Melon 🍉 Watermelon 🍊 Tangerine 🍋 Lemon 🍌 Banana 🍍 Pineapple 🍎 Red Apple 🍏 Green Apple 🍐 Pear 🍑 Peach 🍒 Cherries 🍓 Strawberry 🥝 Kiwi Fruit 🍅 Tomato 🥥 Coconut 🥑 Avocado 🍆 Eggplant 🥔 Potato 🥕 Carrot 🌽 Ear of Corn 🌶 Hot Pepper 🥒 Cucumber 🥦 Broccoli 🍄 Mushroom 🥜 Peanuts 🌰 Chestnut 🍞 Bread 🥐 Croissant 🥖 Baguette Bread 🥨 Pretzel 🥞 Pancakes 🧀 Cheese Wedge 🍖 Meat on Bone 🍗 Poultry Leg 🥩 Cut of Meat 🥓 Bacon 🍔 Hamburger 🍟 French Fries 🍕 Pizza 🌭 Hot Dog 🥪 Sandwich 🌮 Taco 🌯 Burrito 🥙 Stuffed Flatbread 🥚 Egg 🍳 Cooking 🥘 Shallow Pan of Food 🍲 Pot of Food 🥣 Bowl With Spoon 🥗 Green Salad 🍿 Popcorn 🥫 Canned Food 🍱 Bento Box 🍘 Rice Cracker 🍙 Rice Ball 🍚 Cooked Rice 🍛 Curry Rice 🍜 Steaming Bowl 🍝 Spaghetti 🍠 Roasted Sweet Potato 🍢 Oden 🍣 Sushi 🍤 Fried Shrimp 🍥 Fish Cake With Swirl 🍡 Dango 🥟 Dumpling 🥠 Fortune Cookie 🥡 Takeout Box 🍦 Soft Ice Cream 🍧 Shaved Ice 🍨 Ice Cream 🍩 Doughnut 🍪 Cookie 🎂 Birthday Cake 🍰 Shortcake 🥧 Pie 🍫 Chocolate Bar 🍬 Candy 🍭 Lollipop 🍮 Custard 🍯 Honey Pot 🍼 Baby Bottle 🥛 Glass of Milk ☕ Hot Beverage 🍵 Teacup Without Handle 🍶 Sake 🍾 Bottle With Popping Cork 🍷 Wine Glass 🍸 Cocktail Glass 🍹 Tropical Drink 🍺 Beer Mug 🍻 Clinking Beer Mugs 🥂 Clinking Glasses 🥃 Tumbler Glass 🥤 Cup With Straw 🥢 Chopsticks 🍽 Fork and Knife With Plate 🍴 Fork and Knife 🥄 Spoon 🔪 Kitchen Knife 🏺 Amphora 🌍 Globe Showing Europe-Africa 🌎 Globe Showing Americas 🌏 Globe Showing Asia-Australia 🌐 Globe With Meridians 🗺 World Map 🗾 Map of Japan 🏔 Snow-Capped Mountain ⛰ Mountain 🌋 Volcano 🗻 Mount Fuji 🏕 Camping 🏖 Beach With Umbrella 🏜 Desert 
🏝 Desert Island 🏞 National Park 🏟 Stadium 🏛 Classical Building 🏗 Building Construction 🏘 House 🏙 Cityscape 🏚 Derelict House 🏠 House 🏡 House With Garden 🏢 Office Building 🏣 Japanese Post Office 🏤 Post Office 🏥 Hospital 🏦 Bank 🏨 Hotel 🏩 Love Hotel 🏪 Convenience Store 🏫 School 🏬 Department Store 🏭 Factory 🏯 Japanese Castle 🏰 Castle 💒 Wedding 🗼 Tokyo Tower 🗽 Statue of Liberty ⛪ Church 🕌 Mosque 🕍 Synagogue ⛩ Shinto Shrine 🕋 Kaaba ⛲ Fountain ⛺ Tent 🌁 Foggy 🌃 Night With Stars 🌄 Sunrise Over Mountains 🌅 Sunrise 🌆 Cityscape at Dusk 🌇 Sunset 🌉 Bridge at Night ♨ Hot Springs 🌌 Milky Way 🎠 Carousel Horse 🎡 Ferris Wheel 🎢 Roller Coaster 💈 Barber Pole 🎪 Circus Tent 🎭 Performing Arts 🖼 Framed Picture 🎨 Artist Palette 🎰 Slot Machine 🚂 Locomotive 🚃 Railway Car 🚄 High-Speed Train 🚅 High-Speed Train With Bullet Nose 🚆 Train 🚇 Metro 🚈 Light Rail 🚉 Station 🚊 Tram 🚝 Monorail 🚞 Mountain Railway 🚋 Tram Car 🚌 Bus 🚍 Oncoming Bus 🚎 Trolleybus 🚐 Minibus 🚑 Ambulance 🚒 Fire Engine 🚓 Police Car 🚔 Oncoming Police Car 🚕 Taxi 🚖 Oncoming Taxi 🚗 Automobile 🚘 Oncoming Automobile 🚙 Sport Utility Vehicle 🚚 Delivery Truck 🚛 Articulated Lorry 🚜 Tractor 🚲 Bicycle 🛴 Kick Scooter 🛵 Motor Scooter 🚏 Bus Stop 🛣 Motorway 🛤 Railway Track ⛽ Fuel Pump 🚨 Police Car Light 🚥 Horizontal Traffic Light 🚦 Vertical Traffic Light 🚧 Construction 🛑 Stop Sign ⚓ Anchor ⛵ Sailboat 🛶 Canoe 🚤 Speedboat 🛳 Passenger Ship ⛴ Ferry 🛥 Motor Boat 🚢 Ship ✈ Airplane 🛩 Small Airplane 🛫 Airplane Departure 🛬 Airplane Arrival 💺 Seat 🚁 Helicopter 🚟 Suspension Railway 🚠 Mountain Cableway 🚡 Aerial Tramway 🛰 Satellite 🚀 Rocket 🛸 Flying Saucer 🛎 Bellhop Bell 🚪 Door 🛏 Bed 🛋 Couch and Lamp 🚽 Toilet 🚿 Shower 🛁 Bathtub ⌛ Hourglass ⏳ Hourglass With Flowing Sand ⌚ Watch ⏰ Alarm Clock ⏱ Stopwatch ⏲ Timer Clock 🕰 Mantelpiece Clock 🕛 Twelve O’clock 🕧 Twelve-Thirty 🕐 One O’clock 🕜 One-Thirty 🕑 Two O’clock 🕝 Two-Thirty 🕒 Three O’clock 🕞 Three-Thirty 🕓 Four O’clock 🕟 Four-Thirty 🕔 Five O’clock 🕠 Five-Thirty 🕕 Six O’clock 🕡 Six-Thirty 🕖 Seven O’clock 🕢 
Seven-Thirty 🕗 Eight O’clock 🕣 Eight-Thirty 🕘 Nine O’clock 🕤 Nine-Thirty 🕙 Ten O’clock 🕥 Ten-Thirty 🕚 Eleven O’clock 🕦 Eleven-Thirty 🌑 New Moon 🌒 Waxing Crescent Moon 🌓 First Quarter Moon 🌔 Waxing Gibbous Moon 🌕 Full Moon 🌖 Waning Gibbous Moon 🌗 Last Quarter Moon 🌘 Waning Crescent Moon 🌙 Crescent Moon 🌚 New Moon Face 🌛 First Quarter Moon With Face 🌜 Last Quarter Moon With Face 🌡 Thermometer ☀ Sun 🌝 Full Moon With Face 🌞 Sun With Face ⭐ White Medium Star 🌟 Glowing Star 🌠 Shooting Star ☁ Cloud ⛅ Sun Behind Cloud ⛈ Cloud With Lightning and Rain 🌤 Sun Behind Small Cloud 🌥 Sun Behind Large Cloud 🌦 Sun Behind Rain Cloud 🌧 Cloud With Rain 🌨 Cloud With Snow 🌩 Cloud With Lightning 🌪 Tornado 🌫 Fog 🌬 Wind Face 🌀 Cyclone 🌈 Rainbow 🌂 Closed Umbrella ☂ Umbrella ☔ Umbrella With Rain Drops ⛱ Umbrella on Ground ⚡ High Voltage ❄ Snowflake ☃ Snowman ⛄ Snowman Without Snow ☄ Comet 🔥 Fire 💧 Droplet 🌊 Water Wave 🎃 Jack-O-Lantern 🎄 Christmas Tree 🎆 Fireworks 🎇 Sparkler ✨ Sparkles 🎈 Balloon 🎉 Party Popper 🎊 Confetti Ball 🎋 Tanabata Tree 🎍 Pine Decoration 🎎 Japanese Dolls 🎏 Carp Streamer 🎐 Wind Chime 🎑 Moon Viewing Ceremony 🎀 Ribbon 🎁 Wrapped Gift 🎗 Reminder Ribbon 🎟 Admission Tickets 🎫 Ticket 🎖 Military Medal 🏆 Trophy 🏅 Sports Medal 🥇 1st Place Medal 🥈 2nd Place Medal 🥉 3rd Place Medal ⚽ Soccer Ball ⚾ Baseball 🏀 Basketball 🏐 Volleyball 🏈 American Football 🏉 Rugby Football 🎾 Tennis 🎱 Pool 8 Ball 🎳 Bowling 🏏 Cricket 🏑 Field Hockey 🏒 Ice Hockey 🏓 Ping Pong 🏸 Badminton 🥊 Boxing Glove 🥋 Martial Arts Uniform 🥅 Goal Net 🎯 Direct Hit ⛳ Flag in Hole ⛸ Ice Skate 🎣 Fishing Pole 🎽 Running Shirt 🎿 Skis 🛷 Sled 🥌 Curling Stone 🎮 Video Game 🕹 Joystick 🎲 Game Die ♠ Spade Suit ♥ Heart Suit ♦ Diamond Suit ♣ Club Suit 🃏 Joker 🀄 Mahjong Red Dragon 🎴 Flower Playing Cards 🔇 Muted Speaker 🔈 Speaker Low Volume 🔉 Speaker Medium Volume 🔊 Speaker High Volume 📢 Loudspeaker 📣 Megaphone 📯 Postal Horn 🔔 Bell 🔕 Bell With Slash 🎼 Musical Score 🎵 Musical Note 🎶 Musical Notes 🎙 Studio Microphone 🎚 Level Slider 🎛 Control 
Knobs 🎤 Microphone 🎧 Headphone 📻 Radio 🎷 Saxophone 🎸 Guitar 🎹 Musical Keyboard 🎺 Trumpet 🎻 Violin 🥁 Drum 📱 Mobile Phone 📲 Mobile Phone With Arrow ☎ Telephone 📞 Telephone Receiver 📟 Pager 📠 Fax Machine 🔋 Battery 🔌 Electric Plug 💻 Laptop Computer 🖥 Desktop Computer 🖨 Printer ⌨ Keyboard 🖱 Computer Mouse 🖲 Trackball 💽 Computer Disk 💾 Floppy Disk 💿 Optical Disk 📀 DVD 🎥 Movie Camera 🎞 Film Frames 📽 Film Projector 🎬 Clapper Board 📺 Television 📷 Camera 📸 Camera With Flash 📹 Video Camera 📼 Videocassette 🔍 Left-Pointing Magnifying Glass 🔎 Right-Pointing Magnifying Glass 🔬 Microscope 🔭 Telescope 📡 Satellite Antenna 🕯 Candle 💡 Light Bulb 🔦 Flashlight 🏮 Red Paper Lantern 📔 Notebook With Decorative Cover 📕 Closed Book 📖 Open Book 📗 Green Book 📘 Blue Book 📙 Orange Book 📚 Books 📓 Notebook 📒 Ledger 📃 Page With Curl 📜 Scroll 📄 Page Facing Up 📰 Newspaper 🗞 Rolled-Up Newspaper 📑 Bookmark Tabs 🔖 Bookmark 🏷 Label 💰 Money Bag 💴 Yen Banknote 💵 Dollar Banknote 💶 Euro Banknote 💷 Pound Banknote 💸 Money With Wings 💳 Credit Card 💹 Chart Increasing With Yen 💱 Currency Exchange 💲 Heavy Dollar Sign ✉ Envelope 📧 E-Mail 📨 Incoming Envelope 📩 Envelope With Arrow 📤 Outbox Tray 📥 Inbox Tray 📦 Package 📫 Closed Mailbox With Raised Flag 📪 Closed Mailbox With Lowered Flag 📬 Open Mailbox With Raised Flag 📭 Open Mailbox With Lowered Flag 📮 Postbox 🗳 Ballot Box With Ballot ✏ Pencil ✒ Black Nib 🖋 Fountain Pen 🖊 Pen 🖌 Paintbrush 🖍 Crayon 📝 Memo 💼 Briefcase 📁 File Folder 📂 Open File Folder 🗂 Card Index Dividers 📅 Calendar 📆 Tear-Off Calendar 🗒 Spiral Notepad 🗓 Spiral Calendar 📇 Card Index 📈 Chart Increasing 📉 Chart Decreasing 📊 Bar Chart 📋 Clipboard 📌 Pushpin 📍 Round Pushpin 📎 Paperclip 🖇 Linked Paperclips 📏 Straight Ruler 📐 Triangular Ruler ✂ Scissors 🗃 Card File Box 🗄 File Cabinet 🗑 Wastebasket 🔒 Locked 🔓 Unlocked 🔏 Locked With Pen 🔐 Locked With Key 🔑 Key 🗝 Old Key 🔨 Hammer ⛏ Pick ⚒ Hammer and Pick 🛠 Hammer and Wrench 🗡 Dagger ⚔ Crossed Swords 🔫 Pistol 🏹 Bow and Arrow 🛡 Shield 🔧 Wrench 🔩 Nut and Bolt ⚙ Gear 🗜 
Clamp ⚗ Alembic ⚖ Balance Scale 🔗 Link ⛓ Chains 💉 Syringe 💊 Pill 🚬 Cigarette ⚰ Coffin ⚱ Funeral Urn 🗿 Moai 🛢 Oil Drum 🔮 Crystal Ball 🛒 Shopping Cart 🏧 Atm Sign 🚮 Litter in Bin Sign 🚰 Potable Water ♿ Wheelchair Symbol 🚹 Men’s Room 🚺 Women’s Room 🚻 Restroom 🚼 Baby Symbol 🚾 Water Closet 🛂 Passport Control 🛃 Customs 🛄 Baggage Claim 🛅 Left Luggage ⚠ Warning 🚸 Children Crossing ⛔ No Entry 🚫 Prohibited 🚳 No Bicycles 🚭 No Smoking 🚯 No Littering 🚱 Non-Potable Water 🚷 No Pedestrians 📵 No Mobile Phones 🔞 No One Under Eighteen ☢ Radioactive ☣ Biohazard ⬆ Up Arrow ↗ Up-Right Arrow ➡ Right Arrow ↘ Down-Right Arrow ⬇ Down Arrow ↙ Down-Left Arrow ⬅ Left Arrow ↖ Up-Left Arrow ↕ Up-Down Arrow ↔ Left-Right Arrow ↩ Right Arrow Curving Left ↪ Left Arrow Curving Right ⤴ Right Arrow Curving Up ⤵ Right Arrow Curving Down 🔃 Clockwise Vertical Arrows 🔄 Anticlockwise Arrows Button 🔙 Back Arrow 🔚 End Arrow 🔛 On! Arrow 🔜 Soon Arrow 🔝 Top Arrow 🛐 Place of Worship ⚛ Atom Symbol 🕉 Om ✡ Star of David ☸ Wheel of Dharma ☯ Yin Yang ✝ Latin Cross ☦ Orthodox Cross ☪ Star and Crescent ☮ Peace Symbol 🕎 Menorah 🔯 Dotted Six-Pointed Star ♈ Aries ♉ Taurus ♊ Gemini ♋ Cancer ♌ Leo ♍ Virgo ♎ Libra ♏ Scorpius ♐ Sagittarius ♑ Capricorn ♒ Aquarius ♓ Pisces ⛎ Ophiuchus 🔀 Shuffle Tracks Button 🔁 Repeat Button 🔂 Repeat Single Button ▶ Play Button ⏩ Fast-Forward Button ⏭ Next Track Button ⏯ Play or Pause Button ◀ Reverse Button ⏪ Fast Reverse Button ⏮ Last Track Button 🔼 Up Button ⏫ Fast Up Button 🔽 Down Button ⏬ Fast Down Button ⏸ Pause Button ⏹ Stop Button ⏺ Record Button ⏏ Eject Button 🎦 Cinema 🔅 Dim Button 🔆 Bright Button 📶 Antenna Bars 📳 Vibration Mode 📴 Mobile Phone Off ♀ Female Sign ♂ Male Sign ⚕ Medical Symbol ♻ Recycling Symbol ⚜ Fleur-De-Lis 🔱 Trident Emblem 📛 Name Badge 🔰 Japanese Symbol for Beginner ⭕ Heavy Large Circle ✅ White Heavy Check Mark ☑ Ballot Box With Check ✔ Heavy Check Mark ✖ Heavy Multiplication X ❌ Cross Mark ❎ Cross Mark Button ➕ Heavy Plus Sign ➖ Heavy Minus Sign ➗ Heavy Division Sign ➰ 
Curly Loop ➿ Double Curly Loop 〽 Part Alternation Mark ✳ Eight-Spoked Asterisk ✴ Eight-Pointed Star ❇ Sparkle ‼ Double Exclamation Mark ⁉ Exclamation Question Mark ❓ Question Mark ❔ White Question Mark ❕ White Exclamation Mark ❗ Exclamation Mark 〰 Wavy Dash © Copyright ® Registered ™ Trade Mark #️⃣ Keycap Number Sign *️⃣ Keycap Asterisk 0️⃣ Keycap Digit Zero 1️⃣ Keycap Digit One 2️⃣ Keycap Digit Two 3️⃣ Keycap Digit Three 4️⃣ Keycap Digit Four 5️⃣ Keycap Digit Five 6️⃣ Keycap Digit Six 7️⃣ Keycap Digit Seven 8️⃣ Keycap Digit Eight 9️⃣ Keycap Digit Nine 🔟 Keycap 10 💯 Hundred Points 🔠 Input Latin Uppercase 🔡 Input Latin Lowercase 🔢 Input Numbers 🔣 Input Symbols 🔤 Input Latin Letters 🅰 A Button (blood Type) 🆎 Ab Button (blood Type) 🅱 B Button (blood Type) 🆑 CL Button 🆒 Cool Button 🆓 Free Button ℹ Information 🆔 ID Button Ⓜ Circled M 🆕 New Button 🆖 NG Button 🅾 O Button (blood Type) 🆗 OK Button 🅿 P Button 🆘 SOS Button 🆙 Up! Button 🆚 Vs Button 🈁 Japanese “here” Button 🈂 Japanese “service Charge” Button 🈷 Japanese “monthly Amount” Button 🈶 Japanese “not Free of Charge” Button 🈯 Japanese “reserved” Button 🉐 Japanese “bargain” Button 🈹 Japanese “discount” Button 🈚 Japanese “free of Charge” Button 🈲 Japanese “prohibited” Button 🉑 Japanese “acceptable” Button 🈸 Japanese “application” Button 🈴 Japanese “passing Grade” Button 🈳 Japanese “vacancy” Button ㊗ Japanese “congratulations” Button ㊙ Japanese “secret” Button 🈺 Japanese “open for Business” Button 🈵 Japanese “no Vacancy” Button ▪ Black Small Square ▫ White Small Square ◻ White Medium Square ◼ Black Medium Square ◽ White Medium-Small Square ◾ Black Medium-Small Square ⬛ Black Large Square ⬜ White Large Square 🔶 Large Orange Diamond 🔷 Large Blue Diamond 🔸 Small Orange Diamond 🔹 Small Blue Diamond 🔺 Red Triangle Pointed Up 🔻 Red Triangle Pointed Down 💠 Diamond With a Dot 🔘 Radio Button 🔲 Black Square Button 🔳 White Square Button ⚪ White Circle ⚫ Black Circle 🔴 Red Circle 🔵 Blue Circle 🏁 Chequered Flag 🚩 Triangular Flag 🎌 
Crossed Flags 🏴 Black Flag 🏳 White Flag 🏳️‍🌈 Rainbow Flag 🇦🇨 Ascension Island 🇦🇩 Andorra 🇦🇪 United Arab Emirates 🇦🇫 Afghanistan 🇦🇬 Antigua & Barbuda 🇦🇮 Anguilla 🇦🇱 Albania 🇦🇲 Armenia 🇦🇴 Angola 🇦🇶 Antarctica 🇦🇷 Argentina 🇦🇸 American Samoa 🇦🇹 Austria 🇦🇺 Australia 🇦🇼 Aruba 🇦🇽 Åland Islands 🇦🇿 Azerbaijan 🇧🇦 Bosnia & Herzegovina 🇧🇧 Barbados 🇧🇩 Bangladesh 🇧🇪 Belgium 🇧🇫 Burkina Faso 🇧🇬 Bulgaria 🇧🇭 Bahrain 🇧🇮 Burundi 🇧🇯 Benin 🇧🇱 St. Barthélemy 🇧🇲 Bermuda 🇧🇳 Brunei 🇧🇴 Bolivia 🇧🇶 Caribbean Netherlands 🇧🇷 Brazil 🇧🇸 Bahamas 🇧🇹 Bhutan 🇧🇻 Bouvet Island 🇧🇼 Botswana 🇧🇾 Belarus 🇧🇿 Belize 🇨🇦 Canada 🇨🇨 Cocos (Keeling) Islands 🇨🇩 Congo - Kinshasa 🇨🇫 Central African Republic 🇨🇬 Congo - Brazzaville 🇨🇭 Switzerland 🇨🇮 Côte D’Ivoire 🇨🇰 Cook Islands 🇨🇱 Chile 🇨🇲 Cameroon 🇨🇳 China 🇨🇴 Colombia 🇨🇵 Clipperton Island 🇨🇷 Costa Rica 🇨🇺 Cuba 🇨🇻 Cape Verde 🇨🇼 Curaçao 🇨🇽 Christmas Island 🇨🇾 Cyprus 🇨🇿 Czechia 🇩🇪 Germany 🇩🇬 Diego Garcia 🇩🇯 Djibouti 🇩🇰 Denmark 🇩🇲 Dominica 🇩🇴 Dominican Republic 🇩🇿 Algeria 🇪🇦 Ceuta & Melilla 🇪🇨 Ecuador 🇪🇪 Estonia 🇪🇬 Egypt 🇪🇭 Western Sahara 🇪🇷 Eritrea 🇪🇸 Spain 🇪🇹 Ethiopia 🇪🇺 European Union 🇫🇮 Finland 🇫🇯 Fiji 🇫🇰 Falkland Islands 🇫🇲 Micronesia 🇫🇴 Faroe Islands 🇫🇷 France 🇬🇦 Gabon 🇬🇧 United Kingdom 🇬🇩 Grenada 🇬🇪 Georgia 🇬🇫 French Guiana 🇬🇬 Guernsey 🇬🇭 Ghana 🇬🇮 Gibraltar 🇬🇱 Greenland 🇬🇲 Gambia 🇬🇳 Guinea 🇬🇵 Guadeloupe 🇬🇶 Equatorial Guinea 🇬🇷 Greece 🇬🇸 South Georgia & South Sandwich Islands 🇬🇹 Guatemala 🇬🇺 Guam 🇬🇼 Guinea-Bissau 🇬🇾 Guyana 🇭🇰 Hong Kong Sar China 🇭🇲 Heard & Mcdonald Islands 🇭🇳 Honduras 🇭🇷 Croatia 🇭🇹 Haiti 🇭🇺 Hungary 🇮🇨 Canary Islands 🇮🇩 Indonesia 🇮🇪 Ireland 🇮🇱 Israel 🇮🇲 Isle of Man 🇮🇳 India 🇮🇴 British Indian Ocean Territory 🇮🇶 Iraq 🇮🇷 Iran 🇮🇸 Iceland 🇮🇹 Italy 🇯🇪 Jersey 🇯🇲 Jamaica 🇯🇴 Jordan 🇯🇵 Japan 🇰🇪 Kenya 🇰🇬 Kyrgyzstan 🇰🇭 Cambodia 🇰🇮 Kiribati 🇰🇲 Comoros 🇰🇳 St. Kitts & Nevis 🇰🇵 North Korea 🇰🇷 South Korea 🇰🇼 Kuwait 🇰🇾 Cayman Islands 🇰🇿 Kazakhstan 🇱🇦 Laos 🇱🇧 Lebanon 🇱🇨 St. 
Lucia 🇱🇮 Liechtenstein 🇱🇰 Sri Lanka 🇱🇷 Liberia 🇱🇸 Lesotho 🇱🇹 Lithuania 🇱🇺 Luxembourg 🇱🇻 Latvia 🇱🇾 Libya 🇲🇦 Morocco 🇲🇨 Monaco 🇲🇩 Moldova 🇲🇪 Montenegro 🇲🇫 St. Martin 🇲🇬 Madagascar 🇲🇭 Marshall Islands 🇲🇰 Macedonia 🇲🇱 Mali 🇲🇲 Myanmar (Burma) 🇲🇳 Mongolia 🇲🇴 Macau Sar China 🇲🇵 Northern Mariana Islands 🇲🇶 Martinique 🇲🇷 Mauritania 🇲🇸 Montserrat 🇲🇹 Malta 🇲🇺 Mauritius 🇲🇻 Maldives 🇲🇼 Malawi 🇲🇽 Mexico 🇲🇾 Malaysia 🇲🇿 Mozambique 🇳🇦 Namibia 🇳🇨 New Caledonia 🇳🇪 Niger 🇳🇫 Norfolk Island 🇳🇬 Nigeria 🇳🇮 Nicaragua 🇳🇱 Netherlands 🇳🇴 Norway 🇳🇵 Nepal 🇳🇷 Nauru 🇳🇺 Niue 🇳🇿 New Zealand 🇴🇲 Oman 🇵🇦 Panama 🇵🇪 Peru 🇵🇫 French Polynesia 🇵🇬 Papua New Guinea 🇵🇭 Philippines 🇵🇰 Pakistan 🇵🇱 Poland 🇵🇲 St. Pierre & Miquelon 🇵🇳 Pitcairn Islands 🇵🇷 Puerto Rico 🇵🇸 Palestinian Territories 🇵🇹 Portugal 🇵🇼 Palau 🇵🇾 Paraguay 🇶🇦 Qatar 🇷🇪 Réunion 🇷🇴 Romania 🇷🇸 Serbia 🇷🇺 Russia 🇷🇼 Rwanda 🇸🇦 Saudi Arabia 🇸🇧 Solomon Islands 🇸🇨 Seychelles 🇸🇩 Sudan 🇸🇪 Sweden 🇸🇬 Singapore 🇸🇭 St. Helena 🇸🇮 Slovenia 🇸🇯 Svalbard & Jan Mayen 🇸🇰 Slovakia 🇸🇱 Sierra Leone 🇸🇲 San Marino 🇸🇳 Senegal 🇸🇴 Somalia 🇸🇷 Suriname 🇸🇸 South Sudan 🇸🇹 São Tomé & Príncipe 🇸🇻 El Salvador 🇸🇽 Sint Maarten 🇸🇾 Syria 🇸🇿 Swaziland 🇹🇦 Tristan Da Cunha 🇹🇨 Turks & Caicos Islands 🇹🇩 Chad 🇹🇫 French Southern Territories 🇹🇬 Togo 🇹🇭 Thailand 🇹🇯 Tajikistan 🇹🇰 Tokelau 🇹🇱 Timor-Leste 🇹🇲 Turkmenistan 🇹🇳 Tunisia 🇹🇴 Tonga 🇹🇷 Turkey 🇹🇹 Trinidad & Tobago 🇹🇻 Tuvalu 🇹🇼 Taiwan 🇹🇿 Tanzania 🇺🇦 Ukraine 🇺🇬 Uganda 🇺🇲 U.S. Outlying Islands 🇺🇳 United Nations 🇺🇸 United States 🇺🇾 Uruguay 🇺🇿 Uzbekistan 🇻🇦 Vatican City 🇻🇨 St. Vincent & Grenadines 🇻🇪 Venezuela 🇻🇬 British Virgin Islands 🇻🇮 U.S. 
Virgin Islands 🇻🇳 Vietnam 🇻🇺 Vanuatu 🇼🇫 Wallis & Futuna 🇼🇸 Samoa 🇽🇰 Kosovo 🇾🇪 Yemen 🇾🇹 Mayotte 🇿🇦 South Africa 🇿🇲 Zambia 🇿🇼 Zimbabwe 🏴󠁧󠁢󠁥󠁮󠁧󠁿 Flag for England (GB-ENG) 🏴󠁧󠁢󠁳󠁣󠁴󠁿 Flag for Scotland (GB-SCT) 🏴󠁧󠁢󠁷󠁬󠁳󠁿 Flag for Wales (GB-WLS) 🥆 Rifle 🤻 Modern Pentathlon 🏴‍☠️ Pirate Flag 🇦 Regional Indicator Symbol Letter A 🇧 Regional Indicator Symbol Letter B 🇨 Regional Indicator Symbol Letter C 🇩 Regional Indicator Symbol Letter D 🇪 Regional Indicator Symbol Letter E 🇫 Regional Indicator Symbol Letter F 🇬 Regional Indicator Symbol Letter G 🇭 Regional Indicator Symbol Letter H 🇮 Regional Indicator Symbol Letter I 🇯 Regional Indicator Symbol Letter J 🇰 Regional Indicator Symbol Letter K 🇱 Regional Indicator Symbol Letter L 🇲 Regional Indicator Symbol Letter M 🇳 Regional Indicator Symbol Letter N 🇴 Regional Indicator Symbol Letter O 🇵 Regional Indicator Symbol Letter P 🇶 Regional Indicator Symbol Letter Q 🇷 Regional Indicator Symbol Letter R 🇸 Regional Indicator Symbol Letter S 🇹 Regional Indicator Symbol Letter T 🇺 Regional Indicator Symbol Letter U 🇻 Regional Indicator Symbol Letter V 🇼 Regional Indicator Symbol Letter W 🇽 Regional Indicator Symbol Letter X 🇾 Regional Indicator Symbol Letter Y 🇿 Regional Indicator Symbol Letter Z 🐱‍🐉 Dino Cat 🐱‍🚀 Astro Cat 🐱‍👤 Ninja Cat 🐱‍💻 Hacker Cat 🐱‍🏍 Stunt Cat 🐱‍👓 Hipster Cat ◯‍◯‍◯‍◯‍◯ Olympic Rings 🏴󠁮󠁲󠀰󠀵󠁿 Flag for Baiti (NR-05) 🏴󠁮󠁯󠀱󠀷󠁿 Flag for Nord-Trøndelag (NO-17) 🏴󠁮󠁯󠀱󠀲󠁿 Flag for Hordaland (NO-12) 🏴󠁮󠁯󠀰󠀲󠁿 Flag for Akershus (NO-02) 🏴󠁮󠁯󠀱󠀶󠁿 Flag for Sør-Trøndelag (NO-16) 🏴󠁮󠁯󠀰󠀸󠁿 Flag for Telemark (NO-08) 🏴󠁮󠁬󠁵󠁴󠁿 Flag for Utrecht (NL-UT) 🏴󠁮󠁯󠀱󠀵󠁿 Flag for Møre og Romsdal (NO-15) 🏴󠁮󠁯󠀲󠀱󠁿 Flag for Svalbard (NO-21) 🏴󠁮󠁰󠀴󠁿 Flag for Purwanchal (NP-4) 🏴󠁮󠁰󠀱󠁿 Flag for Central (NP-1) 🏴󠁮󠁯󠀰󠀳󠁿 Flag for Oslo (NO-03) 🏴󠁮󠁲󠀰󠀶󠁿 Flag for Boe (NR-06) 👨🏾‍👨🏾‍👦🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁮󠁬󠁮󠁢󠁿 Flag for North Brabant (NL-NB) 🏴󠁮󠁯󠀰󠀹󠁿 Flag for Aust-Agder (NO-09) 🏴󠁮󠁲󠀰󠀲󠁿 
Flag for Anabar (NR-02) 🏴󠁮󠁬󠁬󠁩󠁿 Flag for Limburg (NL-LI) 🏴󠁮󠁯󠀰󠀶󠁿 Flag for Buskerud (NO-06) 🏴󠁮󠁯󠀰󠀴󠁿 Flag for Hedmark (NO-04) 🏴󠁮󠁯󠀰󠀷󠁿 Flag for Vestfold (NO-07) 🏴󠁮󠁲󠀰󠀴󠁿 Flag for Anibare (NR-04) 🏴󠁮󠁯󠀲󠀰󠁿 Flag for Finnmark (NO-20) 🏴󠁮󠁬󠁯󠁶󠁿 Flag for Overijssel (NL-OV) 🏴󠁮󠁯󠀱󠀱󠁿 Flag for Rogaland (NO-11) 🏴󠁮󠁯󠀰󠀱󠁿 Flag for Østfold (NO-01) 🏴󠁮󠁲󠀰󠀱󠁿 Flag for Aiwo (NR-01) 🏴󠁮󠁬󠁺󠁥󠁿 Flag for Zeeland (NL-ZE) 🏴󠁮󠁲󠀰󠀷󠁿 Flag for Buada (NR-07) 🏴󠁮󠁯󠀱󠀹󠁿 Flag for Troms (NO-19) 🏴󠁮󠁯󠀰󠀵󠁿 Flag for Oppland (NO-05) 🏴󠁮󠁰󠀲󠁿 Flag for Madhya Pashchimanchal (NP-2) 🏴󠁮󠁲󠀰󠀳󠁿 Flag for Anetan (NR-03) 🏴󠁮󠁰󠀳󠁿 Flag for Western (NP-3) 🏴󠁮󠁯󠀲󠀲󠁿 Flag for Jan Mayen (NO-22) 🏴󠁮󠁯󠀱󠀸󠁿 Flag for Nordland (NO-18) 🏴󠁰󠁡󠀱󠁿 Flag for Bocas del Toro (PA-1) 🏴󠁰󠁡󠀳󠁿 Flag for Colón (PA-3) 🏴󠁯󠁭󠁤󠁡󠁿 Flag for Ad Dakhiliyah (OM-DA) 🏴󠁯󠁭󠁭󠁡󠁿 Flag for Muscat (OM-MA) 🏴󠁮󠁲󠀰󠀹󠁿 Flag for Ewa (NR-09) 🏴󠁮󠁺󠁴󠁫󠁩󠁿 Flag for Taranaki (NZ-TKI) 🏴󠁮󠁲󠀱󠀰󠁿 Flag for Ijuw (NR-10) 🏴󠁮󠁺󠁷󠁴󠁣󠁿 Flag for West Coast (NZ-WTC) 🏴󠁮󠁺󠁳󠁴󠁬󠁿 Flag for Southland (NZ-STL) 🏴󠁮󠁺󠁴󠁡󠁳󠁿 Flag for Tasman (NZ-TAS) 🏴󠁮󠁺󠁭󠁷󠁴󠁿 Flag for Manawatu-Wanganui (NZ-MWT) 🏴󠁮󠁺󠁷󠁫󠁯󠁿 Flag for Waikato (NZ-WKO) 🏴󠁮󠁺󠁭󠁢󠁨󠁿 Flag for Marl (NZ-MBH) 🏴󠁮󠁺󠁢󠁯󠁰󠁿 Flag for Bay of Plenty (NZ-BOP) 🏴󠁮󠁲󠀱󠀲󠁿 Flag for Nibok (NR-12) 🏴󠁯󠁭󠁢󠁵󠁿 Flag for Al Buraimi (OM-BU) 🏴󠁮󠁺󠁡󠁵󠁫󠁿 Flag for Auckland (NZ-AUK) 🏴󠁯󠁭󠁳󠁪󠁿 Flag for Janub ash Sharqiyah (OM-SJ) 🏴󠁯󠁭󠁳󠁳󠁿 Flag for Shamal ash Sharqiyah (OM-SS) 🏴󠁰󠁡󠀲󠁿 Flag for Coclé (PA-2) 🏴󠁮󠁲󠀱󠀱󠁿 Flag for Meneng (NR-11) 🏴󠁰󠁡󠀱󠀰󠁿 Flag for West Panamá (PA-10) 🏴󠁯󠁭󠁺󠁡󠁿 Flag for Ad Dhahirah (OM-ZA) 🏴󠁮󠁺󠁮󠁴󠁬󠁿 Flag for Northland (NZ-NTL) 🏴󠁮󠁺󠁣󠁡󠁮󠁿 Flag for Canterbury (NZ-CAN) 🏴󠁮󠁺󠁧󠁩󠁳󠁿 Flag for Gisborne (NZ-GIS) 🏴󠁮󠁺󠁣󠁩󠁴󠁿 Flag for Chatham Islands (NZ-CIT) 🏴󠁮󠁲󠀱󠀳󠁿 Flag for Uaboe (NR-13) 🏴󠁮󠁲󠀰󠀸󠁿 Flag for Denigomodu (NR-08) 🏴󠁯󠁭󠁭󠁵󠁿 Flag for Musandam (OM-MU) 🏴󠁯󠁭󠁢󠁳󠁿 Flag for Shamal al Batinah (OM-BS) 🏴󠁮󠁺󠁨󠁫󠁢󠁿 Flag for Hawke’s Bay (NZ-HKB) 🏴󠁮󠁺󠁯󠁴󠁡󠁿 Flag for Otago (NZ-OTA) 🏴󠁯󠁭󠁢󠁪󠁿 Flag for Janub al Batinah (OM-BJ) 🏴󠁯󠁭󠁺󠁵󠁿 Flag for Dhofar (OM-ZU) 🏴󠁰󠁡󠀵󠁿 Flag for Darién (PA-5) 🏴󠁰󠁥󠁣󠁡󠁬󠁿 Flag for El Callao (PE-CAL) 🏴󠁰󠁡󠀶󠁿 Flag for Herrera (PA-6) 🏴󠁰󠁡󠁫󠁹󠁿 Flag for Guna Yala 
(PA-KY) 🏴󠁰󠁡󠁥󠁭󠁿 Flag for Emberá (PA-EM) 🏴󠁰󠁥󠁬󠁡󠁬󠁿 Flag for La Libertad (PE-LAL) 🏴󠁰󠁡󠀹󠁿 Flag for Veraguas (PA-9) 🏴󠁰󠁥󠁬󠁯󠁲󠁿 Flag for Loreto (PE-LOR) 🏴󠁰󠁥󠁡󠁭󠁡󠁿 Flag for Amazonas (PE-AMA) 🏴󠁰󠁡󠀴󠁿 Flag for Chiriquí (PA-4) 🏴󠁰󠁧󠁣󠁰󠁫󠁿 Flag for Chimbu (PG-CPK) 🏴󠁰󠁧󠁥󠁨󠁧󠁿 Flag for Eastern Highlands (PG-EHG) 🏴󠁰󠁥󠁳󠁡󠁭󠁿 Flag for San Martín (PE-SAM) 🏴󠁰󠁥󠁪󠁵󠁮󠁿 Flag for Junín (PE-JUN) 🏴󠁰󠁥󠁨󠁵󠁣󠁿 Flag for Huánuco (PE-HUC) 🏴󠁰󠁥󠁰󠁡󠁳󠁿 Flag for Pasco (PE-PAS) 🏴󠁰󠁡󠁮󠁢󠁿 Flag for Ngöbe-Buglé (PA-NB) 🏴󠁰󠁥󠁣󠁡󠁪󠁿 Flag for Cajamarca (PE-CAJ) 🏴󠁰󠁥󠁩󠁣󠁡󠁿 Flag for Ica (PE-ICA) 🏴󠁰󠁥󠁬󠁩󠁭󠁿 Flag for Lima Region (PE-LIM) 🏴󠁰󠁥󠁭󠁯󠁱󠁿 Flag for Moquegua (PE-MOQ) 🏴󠁰󠁥󠁰󠁵󠁮󠁿 Flag for Puno (PE-PUN) 🏴󠁰󠁥󠁵󠁣󠁡󠁿 Flag for Ucayali (PE-UCA) 🏴󠁰󠁥󠁬󠁭󠁡󠁿 Flag for Lima (PE-LMA) 🏴󠁰󠁥󠁰󠁩󠁵󠁿 Flag for Piura (PE-PIU) 🏴󠁰󠁥󠁴󠁵󠁭󠁿 Flag for Tumbes (PE-TUM) 🏴󠁰󠁥󠁣󠁵󠁳󠁿 Flag for Cusco (PE-CUS) 🏴󠁰󠁡󠀸󠁿 Flag for Panamá (PA-8) 🏴󠁰󠁥󠁴󠁡󠁣󠁿 Flag for Tacna (PE-TAC) 🏴󠁰󠁧󠁣󠁰󠁭󠁿 Flag for Central (PG-CPM) 🏴󠁰󠁡󠀷󠁿 Flag for Los Santos (PA-7) 🏴󠁰󠁥󠁬󠁡󠁭󠁿 Flag for Lambayeque (PE-LAM) 🏴󠁰󠁥󠁨󠁵󠁶󠁿 Flag for Huancavelica (PE-HUV) 🏴󠁰󠁥󠁡󠁮󠁣󠁿 Flag for Ancash (PE-ANC) 🏴󠁰󠁧󠁨󠁬󠁡󠁿 Flag for Hela (PG-HLA) 🏴󠁰󠁧󠁮󠁣󠁤󠁿 Flag for Port Moresby (PG-NCD) 🏴󠁰󠁫󠁩󠁳󠁿 Flag for Islamabad (PK-IS) 🏴󠁰󠁨󠀰󠀰󠁿 Flag for Metro Manila (PH-00) 🏴󠁰󠁨󠀰󠀵󠁿 Flag for Bicol (PH-05) 🏴󠁰󠁧󠁧󠁰󠁫󠁿 Flag for Gulf (PG-GPK) 🏴󠁰󠁨󠀰󠀹󠁿 Flag for Zamboanga Peninsula (PH-09) 🏴󠁰󠁧󠁮󠁳󠁢󠁿 Flag for Bougainville (PG-NSB) 🏴󠁰󠁫󠁧󠁢󠁿 Flag for Gilgit-Baltistan (PK-GB) 🏴󠁰󠁧󠁭󠁰󠁭󠁿 Flag for Madang (PG-MPM) 🏴󠁦󠁪󠁷󠁿 Flag for Western (FJ-W) 🏴󠁰󠁨󠀱󠀲󠁿 Flag for Soccsksargen (PH-12) 🏴󠁰󠁨󠀰󠀸󠁿 Flag for Eastern Visayas (PH-08) 🏴󠁰󠁧󠁥󠁰󠁷󠁿 Flag for Enga (PG-EPW) 🏴󠁰󠁧󠁭󠁢󠁡󠁿 Flag for Milne Bay (PG-MBA) 🏴󠁰󠁨󠀴󠀰󠁿 Flag for Calabarzon (PH-40) 🏴󠁰󠁧󠁪󠁷󠁫󠁿 Flag for Jiwaka (PG-JWK) 🏴󠁰󠁨󠀰󠀲󠁿 Flag for Cagayan Valley (PH-02) 👨🏿‍👨🏿‍👦🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁰󠁧󠁭󠁰󠁬󠁿 Flag for Morobe (PG-MPL) 🏴󠁰󠁨󠀱󠀰󠁿 Flag for Northern Mindanao (PH-10) 🏴󠁰󠁨󠀴󠀱󠁿 Flag for Mimaropa (PH-41) 🏴󠁰󠁫󠁢󠁡󠁿 Flag for Balochistan (PK-BA) 🏴󠁰󠁨󠀱󠀳󠁿 Flag for Caraga (PH-13) 🏴󠁰󠁧󠁥󠁳󠁷󠁿 Flag for East Sepik (PG-ESW) 🏴󠁰󠁨󠀰󠀶󠁿 Flag for 
Western Visayas (PH-06) 🏴󠁰󠁨󠀰󠀳󠁿 Flag for Central Luzon (PH-03) 🏴󠁰󠁨󠀱󠀴󠁿 Flag for Muslim Mindanao (PH-14) 🏴󠁰󠁧󠁳󠁨󠁭󠁿 Flag for Southern Highlands (PG-SHM) 🏴󠁰󠁧󠁷󠁰󠁤󠁿 Flag for Western (PG-WPD) 🏴󠁰󠁧󠁳󠁡󠁮󠁿 Flag for Sandaun (PG-SAN) 🏴󠁰󠁧󠁮󠁩󠁫󠁿 Flag for New Ireland (PG-NIK) 🏴󠁰󠁧󠁮󠁰󠁰󠁿 Flag for Oro (PG-NPP) 🏴󠁰󠁧󠁭󠁲󠁬󠁿 Flag for Manus (PG-MRL) 🏴󠁰󠁧󠁷󠁨󠁭󠁿 Flag for Western Highlands (PG-WHM) 🏴󠁰󠁨󠀱󠀱󠁿 Flag for Davao (PH-11) 🏴󠁰󠁫󠁰󠁢󠁿 Flag for Punjab (PK-PB) 🏴󠁰󠁬󠁰󠁭󠁿 Flag for Federal Capital Territory (PL-PM) 🏴󠁰󠁬󠁳󠁬󠁿 Flag for Silesia (PL-SL) 🏴󠁰󠁬󠁫󠁰󠁿 Flag for Kuyavian-Pomerania (PL-KP) 🏴󠁰󠁳󠁴󠁢󠁳󠁿 Flag for Tubas (PS-TBS) 🏴󠁰󠁳󠁲󠁢󠁨󠁿 Flag for Ramallah and al-Bireh (PS-RBH) 🏴󠁰󠁳󠁧󠁺󠁡󠁿 Flag for Gaza (PS-GZA) 🏴󠁰󠁳󠁲󠁦󠁨󠁿 Flag for Rafah (PS-RFH) 🏴󠁰󠁳󠁨󠁢󠁮󠁿 Flag for Hebron (PS-HBN) 🏴󠁰󠁬󠁰󠁤󠁿 Flag for Podlaskie (PL-PD) 🏴󠁰󠁬󠁰󠁫󠁿 Flag for Subcarpathia (PL-PK) 🏴󠁰󠁳󠁪󠁥󠁮󠁿 Flag for Jenin (PS-JEN) 🏴󠁰󠁬󠁤󠁳󠁿 Flag for Lower Silesian (PL-DS) 🏴󠁰󠁳󠁫󠁹󠁳󠁿 Flag for Khan Yunis (PS-KYS) 🏴󠁰󠁬󠁬󠁤󠁿 Flag for Łódź (PL-LD) 🏴󠁰󠁳󠁮󠁧󠁺󠁿 Flag for North Gaza (PS-NGZ) 🏴󠁰󠁬󠁺󠁰󠁿 Flag for West Pomerania (PL-ZP) 🏴󠁰󠁫󠁪󠁫󠁿 Flag for Azad Kashmir (PK-JK) 🏴󠁰󠁳󠁳󠁬󠁴󠁿 Flag for Salfit (PS-SLT) 🏴󠁰󠁬󠁭󠁺󠁿 Flag for Mazovia (PL-MZ) 🏴󠁰󠁬󠁭󠁡󠁿 Flag for Lesser Poland (PL-MA) 🏴󠁰󠁳󠁱󠁱󠁡󠁿 Flag for Qalqilya (PS-QQA) 🏴󠁰󠁴󠀰󠀱󠁿 Flag for Aveiro (PT-01) 🏴󠁰󠁬󠁷󠁰󠁿 Flag for Greater Poland (PL-WP) 🏴󠁰󠁬󠁯󠁰󠁿 Flag for Opole (PL-OP) 🏴󠁰󠁳󠁢󠁴󠁨󠁿 Flag for Bethlehem (PS-BTH) 🏴󠁰󠁫󠁫󠁰󠁿 Flag for Khyber Pakhtunkhwa (PK-KP) 🏴󠁰󠁳󠁴󠁫󠁭󠁿 Flag for Tulkarm (PS-TKM) 🏴󠁰󠁳󠁮󠁢󠁳󠁿 Flag for Nablus (PS-NBS) 🏴󠁰󠁬󠁷󠁮󠁿 Flag for Warmian-Masuria (PL-WN) 🏴󠁰󠁳󠁪󠁲󠁨󠁿 Flag for Jericho (PS-JRH) 🏴󠁰󠁫󠁳󠁤󠁿 Flag for Sindh (PK-SD) 🏴󠁰󠁬󠁬󠁵󠁿 Flag for Lublin (PL-LU) 🏴󠁰󠁳󠁪󠁥󠁭󠁿 Flag for Jerusalem (PS-JEM) 🏴󠁰󠁬󠁬󠁢󠁿 Flag for Lubusz (PL-LB) 🏴󠁰󠁬󠁳󠁫󠁿 Flag for Świętokrzyskie (PL-SK) 🏴󠁰󠁷󠀲󠀱󠀲󠁿 Flag for Melekeok (PW-212) 🏴󠁰󠁴󠀰󠀸󠁿 Flag for Faro (PT-08) 🏴󠁰󠁹󠀱󠀱󠁿 Flag for Central (PY-11) 🏴󠁰󠁴󠀰󠀷󠁿 Flag for Évora (PT-07) 🏴󠁰󠁷󠀲󠀲󠀸󠁿 Flag for Ngiwal (PW-228) 🏴󠁰󠁹󠀱󠀲󠁿 Flag for Ñeembucú (PY-12) 🏴󠁰󠁴󠀱󠀶󠁿 Flag for Viana do Castelo (PT-16) 🏴󠁰󠁴󠀱󠀱󠁿 Flag for Lisbon (PT-11) 🏴󠁰󠁹󠀱󠀵󠁿 Flag for Presidente Hayes (PY-15) 🏴󠁰󠁴󠀱󠀷󠁿 Flag for Vila Real 
(PT-17) 🏴󠁰󠁴󠀱󠀸󠁿 Flag for Viseu (PT-18) 🏴󠁰󠁷󠀰󠀰󠀴󠁿 Flag for Airai (PW-004) 🏴󠁰󠁹󠀱󠀳󠁿 Flag for Amambay (PY-13) 🏴󠁰󠁷󠀲󠀲󠀴󠁿 Flag for Ngatpang (PW-224) 🏴󠁰󠁴󠀰󠀶󠁿 Flag for Coimbra (PT-06) 🏴󠁰󠁴󠀱󠀲󠁿 Flag for Portalegre (PT-12) 🏴󠁰󠁷󠀳󠀵󠀰󠁿 Flag for Peleliu (PW-350) 🏴󠁰󠁷󠀲󠀲󠀲󠁿 Flag for Ngardmau (PW-222) 🏴󠁰󠁷󠀲󠀱󠀴󠁿 Flag for Ngaraard (PW-214) 🏴󠁰󠁹󠀱󠀴󠁿 Flag for Canindeyú (PY-14) 🏴󠁰󠁷󠀰󠀱󠀰󠁿 Flag for Angaur (PW-010) 🏴󠁰󠁷󠀳󠀷󠀰󠁿 Flag for Sonsorol (PW-370) 🏴󠁰󠁴󠀰󠀴󠁿 Flag for Bragança (PT-04) 🏴󠁰󠁴󠀰󠀵󠁿 Flag for Castelo Branco (PT-05) 🏴󠁰󠁴󠀱󠀴󠁿 Flag for Santarém (PT-14) 🏴󠁰󠁴󠀰󠀳󠁿 Flag for Braga (PT-03) 🏴󠁰󠁷󠀰󠀵󠀰󠁿 Flag for Hatohobei (PW-050) 🏴󠁰󠁷󠀱󠀵󠀰󠁿 Flag for Koror (PW-150) 🏴󠁰󠁹󠀱󠀰󠁿 Flag for Alto Paraná (PY-10) 🏴󠁰󠁷󠀲󠀲󠀷󠁿 Flag for Ngeremlengui (PW-227) 🏴󠁰󠁴󠀱󠀰󠁿 Flag for Leiria (PT-10) 🏴󠁰󠁴󠀱󠀳󠁿 Flag for Porto (PT-13) 🏴󠁰󠁴󠀱󠀵󠁿 Flag for Setúbal (PT-15) 🏴󠁰󠁷󠀰󠀰󠀲󠁿 Flag for Aimeliik (PW-002) 🏴󠁰󠁷󠀲󠀲󠀶󠁿 Flag for Ngchesar (PW-226) 🏴󠁰󠁴󠀰󠀹󠁿 Flag for Guarda (PT-09) 🏴󠁰󠁹󠀲󠁿 Flag for San Pedro (PY-2) 🏴󠁰󠁹󠀵󠁿 Flag for Caaguazú (PY-5) 🏴󠁰󠁹󠀴󠁿 Flag for Guairá (PY-4) 🏴󠁲󠁯󠁢󠁣󠁿 Flag for Bacău (RO-BC) 🏴󠁰󠁹󠀷󠁿 Flag for Itapúa (PY-7) 🏴󠁲󠁯󠁣󠁳󠁿 Flag for Caraș-Severin (RO-CS) 🏴󠁰󠁹󠀶󠁿 Flag for Caazapá (PY-6) 🏴󠁱󠁡󠁫󠁨󠁿 Flag for Al Khor (QA-KH) 🏴󠁲󠁯󠁣󠁶󠁿 Flag for Covasna (RO-CV) 🏴󠁲󠁯󠁡󠁢󠁿 Flag for Alba (RO-AB) 🏴󠁱󠁡󠁤󠁡󠁿 Flag for Doha (QA-DA) 🏴󠁲󠁯󠁤󠁪󠁿 Flag for Dolj (RO-DJ) 🏴󠁰󠁹󠀳󠁿 Flag for Cordillera (PY-3) 🏴󠁱󠁡󠁭󠁳󠁿 Flag for Madinat ash Shamal (QA-MS) 🏴󠁲󠁯󠁢󠁨󠁿 Flag for Bihor (RO-BH) 🏴󠁲󠁯󠁨󠁲󠁿 Flag for Harghita (RO-HR) 🏴󠁲󠁯󠁢󠁲󠁿 Flag for Brăila (RO-BR) 🏴󠁲󠁯󠁡󠁧󠁿 Flag for Argeș (RO-AG) 🏴󠁱󠁡󠁺󠁡󠁿 Flag for Al Daayen (QA-ZA) 🏴󠁲󠁯󠁢󠁮󠁿 Flag for Bistriţa-Năsăud (RO-BN) 🏴󠁲󠁯󠁣󠁬󠁿 Flag for Călărași (RO-CL) 🏴󠁰󠁹󠁡󠁳󠁵󠁿 Flag for Asunción (PY-ASU) 🏴󠁰󠁹󠀱󠁿 Flag for Concepción (PY-1) 🏴󠁲󠁯󠁢󠁴󠁿 Flag for Botoşani (RO-BT) 🏴󠁲󠁯󠁧󠁬󠁿 Flag for Galați (RO-GL) 🏴󠁲󠁯󠁧󠁲󠁿 Flag for Giurgiu (RO-GR) 🏴󠁰󠁹󠀱󠀹󠁿 Flag for Boquerón (PY-19) 🏴󠁰󠁹󠀸󠁿 Flag for Misiones (PY-8) 🏴󠁲󠁯󠁢󠁿 Flag for Bucharest (RO-B) 🏴󠁰󠁹󠀹󠁿 Flag for Paraguarí (PY-9) 🏴󠁱󠁡󠁲󠁡󠁿 Flag for Al Rayyan (QA-RA) 🏴󠁲󠁯󠁣󠁴󠁿 Flag for Constanța (RO-CT) 🏴󠁲󠁯󠁨󠁤󠁿 Flag for Hunedoara (RO-HD) 🏴󠁲󠁯󠁤󠁢󠁿 Flag for Dâmbovița (RO-DB) 🏴󠁲󠁯󠁡󠁲󠁿 Flag 
for Arad (RO-AR) 🏴󠁲󠁯󠁣󠁪󠁿 Flag for Cluj (RO-CJ) 🏴󠁲󠁯󠁢󠁺󠁿 Flag for Buzău (RO-BZ) 🏴󠁱󠁡󠁷󠁡󠁿 Flag for Al Wakrah (QA-WA) 🏴󠁲󠁯󠁶󠁬󠁿 Flag for Vâlcea (RO-VL) 🏴󠁲󠁯󠁩󠁳󠁿 Flag for Iași (RO-IS) 🏴󠁲󠁯󠁭󠁨󠁿 Flag for Mehedinți (RO-MH) 🏴󠁲󠁳󠁫󠁭󠁿 Flag for Kosovo-Metohija (RS-KM) 🏴󠁲󠁯󠁩󠁬󠁿 Flag for Ialomița (RO-IL) 🏴󠁲󠁯󠁴󠁲󠁿 Flag for Teleorman (RO-TR) 🏴󠁲󠁳󠀱󠀲󠁿 Flag for Šumadija (RS-12) 🏴󠁲󠁳󠀲󠀰󠁿 Flag for Nišava (RS-20) 🏴󠁲󠁵󠁡󠁬󠁿 Flag for Altai (RU-AL) 🏴󠁲󠁯󠁶󠁮󠁿 Flag for Vrancea (RO-VN) 🏴󠁲󠁯󠁶󠁳󠁿 Flag for Vaslui (RO-VS) 🏴󠁲󠁯󠁩󠁦󠁿 Flag for Ilfov (RO-IF) 🏴󠁲󠁳󠀰󠀸󠁿 Flag for Mačva (RS-08) 🏴󠁲󠁳󠀰󠀹󠁿 Flag for Kolubara (RS-09) 🏴󠁲󠁯󠁰󠁨󠁿 Flag for Prahova (RO-PH) 🏴󠁲󠁳󠀱󠀱󠁿 Flag for Braničevo (RS-11) 🏴󠁲󠁳󠀰󠀰󠁿 Flag for Beograd (RS-00) 🏴󠁲󠁳󠀱󠀵󠁿 Flag for Zaječar (RS-15) 🏴󠁲󠁳󠀱󠀷󠁿 Flag for Moravica (RS-17) 🏴󠁲󠁳󠀱󠀳󠁿 Flag for Pomoravlje (RS-13) 🏴󠁲󠁯󠁯󠁴󠁿 Flag for Olt (RO-OT) 🏴󠁲󠁯󠁳󠁭󠁿 Flag for Satu Mare (RO-SM) 🏴󠁲󠁳󠀲󠀱󠁿 Flag for Toplica (RS-21) 🏴󠁲󠁯󠁳󠁪󠁿 Flag for Sălaj (RO-SJ) 🏴󠁲󠁯󠁭󠁳󠁿 Flag for Mureş (RO-MS) 🏴󠁲󠁳󠀲󠀲󠁿 Flag for Pirot (RS-22) 🏴󠁲󠁳󠀱󠀹󠁿 Flag for Rasina (RS-19) 🏴󠁲󠁳󠀲󠀴󠁿 Flag for Pčinja (RS-24) 🏴󠁲󠁯󠁭󠁭󠁿 Flag for Maramureş (RO-MM) 🏴󠁲󠁯󠁳󠁶󠁿 Flag for Suceava (RO-SV) 🏴󠁲󠁳󠀱󠀸󠁿 Flag for Raška (RS-18) 🏴󠁲󠁳󠀱󠀴󠁿 Flag for Bor (RS-14) 🏴󠁲󠁳󠀱󠀰󠁿 Flag for Podunavlje (RS-10) 🏴󠁲󠁯󠁮󠁴󠁿 Flag for Neamţ (RO-NT) 🏴󠁲󠁳󠀱󠀶󠁿 Flag for Zlatibor (RS-16) 🏴󠁲󠁳󠁶󠁯󠁿 Flag for Vojvodina (RS-VO) 🏴󠁲󠁳󠀲󠀳󠁿 Flag for Jablanica (RS-23) 🏴󠁲󠁯󠁴󠁬󠁿 Flag for Tulcea (RO-TL) 🏴󠁲󠁵󠁡󠁤󠁿 Flag for Adygea (RU-AD) 🏴󠁲󠁯󠁴󠁭󠁿 Flag for Timiș (RO-TM) 👩🏼‍👦🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁲󠁵󠁫󠁣󠁿 Flag for Karachay-Cherkess (RU-KC) 🏴󠁲󠁵󠁫󠁫󠁿 Flag for Khakassia (RU-KK) 🏴󠁲󠁵󠁢󠁵󠁿 Flag for Buryat (RU-BU) 🏴󠁲󠁵󠁫󠁬󠁿 Flag for Kalmykia (RU-KL) 🏴󠁲󠁵󠁢󠁥󠁬󠁿 Flag for Belgorod (RU-BEL) 🏴󠁲󠁵󠁫󠁨󠁭󠁿 Flag for Khanty-Mansi (RU-KHM) 🏴󠁲󠁵󠁬󠁥󠁮󠁿 Flag for Leningrad (RU-LEN) 🏴󠁲󠁵󠁫󠁧󠁮󠁿 Flag for Kurgan (RU-KGN) 🏴󠁲󠁵󠁩󠁶󠁡󠁿 Flag for Ivanovo (RU-IVA) 🏴󠁲󠁵󠁩󠁮󠁿 Flag for Ingushetia (RU-IN) 🏴󠁲󠁵󠁫󠁩󠁲󠁿 Flag for Kirov (RU-KIR) 🏴󠁲󠁵󠁫󠁤󠁡󠁿 Flag for Krasnodar Krai (RU-KDA) 🏴󠁲󠁵󠁫󠁲󠁿 Flag for Karelia (RU-KR) 🏴󠁲󠁵󠁭󠁡󠁧󠁿 Flag for Magadan (RU-MAG) 🏴󠁲󠁵󠁫󠁹󠁡󠁿 Flag 
for Krasnoyarsk Krai (RU-KYA) 🏴󠁲󠁵󠁫󠁥󠁭󠁿 Flag for Kemerovo (RU-KEM) 🏴󠁲󠁵󠁡󠁳󠁴󠁿 Flag for Astrakhan (RU-AST) 🏴󠁲󠁵󠁡󠁭󠁵󠁿 Flag for Amur (RU-AMU) 🏴󠁲󠁵󠁭󠁯󠁿 Flag for Mordovia (RU-MO) 🏴󠁲󠁵󠁫󠁯󠁿 Flag for Komi (RU-KO) 🏴󠁲󠁵󠁣󠁨󠁥󠁿 Flag for Chelyabinsk (RU-CHE) 🏴󠁲󠁵󠁫󠁨󠁡󠁿 Flag for Khabarovsk Krai (RU-KHA) 🏴󠁲󠁵󠁫󠁲󠁳󠁿 Flag for Kursk (RU-KRS) 🏴󠁲󠁵󠁭󠁥󠁿 Flag for Mari El (RU-ME) 🏴󠁲󠁵󠁣󠁨󠁵󠁿 Flag for Chukotka Okrug (RU-CHU) 🏴󠁲󠁵󠁫󠁧󠁤󠁿 Flag for Kaliningrad (RU-KGD) 🏴󠁲󠁵󠁩󠁲󠁫󠁿 Flag for Irkutsk (RU-IRK) 🏴󠁲󠁵󠁫󠁬󠁵󠁿 Flag for Kaluga (RU-KLU) 🏴󠁲󠁵󠁫󠁢󠁿 Flag for Kabardino-Balkar (RU-KB) 🏴󠁲󠁵󠁬󠁩󠁰󠁿 Flag for Lipetsk (RU-LIP) 🏴󠁲󠁵󠁢󠁡󠁿 Flag for Bashkortostan (RU-BA) 🏴󠁲󠁵󠁣󠁵󠁿 Flag for Chuvash (RU-CU) 🏴󠁲󠁵󠁫󠁡󠁭󠁿 Flag for Kamchatka Krai (RU-KAM) 🏴󠁲󠁵󠁫󠁯󠁳󠁿 Flag for Kostroma (RU-KOS) 🏴󠁲󠁵󠁳󠁡󠁫󠁿 Flag for Sakhalin (RU-SAK) 🏴󠁲󠁵󠁴󠁶󠁥󠁿 Flag for Tver (RU-TVE) 🏴󠁲󠁵󠁮󠁶󠁳󠁿 Flag for Novosibirsk (RU-NVS) 🏴󠁲󠁵󠁶󠁬󠁡󠁿 Flag for Vladimir (RU-VLA) 🏴󠁲󠁵󠁯󠁲󠁬󠁿 Flag for Oryol (RU-ORL) 🏴󠁲󠁵󠁳󠁴󠁡󠁿 Flag for Stavropol Krai (RU-STA) 🏴󠁲󠁵󠁮󠁩󠁺󠁿 Flag for Nizhny Novgorod (RU-NIZ) 🏴󠁲󠁵󠁳󠁡󠁲󠁿 Flag for Saratov (RU-SAR) 🏴󠁲󠁵󠁯󠁲󠁥󠁿 Flag for Orenburg (RU-ORE) 🏴󠁲󠁵󠁮󠁥󠁮󠁿 Flag for Nenets (RU-NEN) 🏴󠁲󠁵󠁶󠁧󠁧󠁿 Flag for Volgograd (RU-VGG) 🏴󠁲󠁵󠁴󠁯󠁭󠁿 Flag for Tomsk (RU-TOM) 🏴󠁲󠁵󠁳󠁶󠁥󠁿 Flag for Sverdlovsk (RU-SVE) 🏴󠁲󠁵󠁳󠁰󠁥󠁿 Flag for Saint Petersburg (RU-SPE) 🏴󠁲󠁵󠁹󠁡󠁮󠁿 Flag for Yamalo-Nenets Okrug (RU-YAN) 🏴󠁲󠁵󠁳󠁡󠁿 Flag for Sakha (RU-SA) 🏴󠁲󠁵󠁭󠁯󠁷󠁿 Flag for Moscow (RU-MOW) 🏴󠁲󠁵󠁰󠁮󠁺󠁿 Flag for Penza (RU-PNZ) 🏴󠁲󠁵󠁳󠁭󠁯󠁿 Flag for Smolensk (RU-SMO) 🏴󠁲󠁵󠁴󠁡󠁿 Flag for Tatarstan (RU-TA) 🏴󠁲󠁵󠁶󠁬󠁧󠁿 Flag for Vologda (RU-VLG) 🏴󠁲󠁵󠁴󠁵󠁬󠁿 Flag for Tula (RU-TUL) 🏴󠁲󠁵󠁹󠁡󠁲󠁿 Flag for Yaroslavl (RU-YAR) 🏴󠁲󠁵󠁴󠁹󠁵󠁿 Flag for Tyumen (RU-TYU) 🏴󠁲󠁵󠁰󠁳󠁫󠁿 Flag for Pskov (RU-PSK) 🏴󠁲󠁵󠁵󠁤󠁿 Flag for Udmurt (RU-UD) 🏴󠁲󠁵󠁳󠁡󠁭󠁿 Flag for Samara (RU-SAM) 🏴󠁲󠁵󠁵󠁬󠁹󠁿 Flag for Ulyanovsk (RU-ULY) 🏴󠁲󠁵󠁲󠁹󠁡󠁿 Flag for Ryazan (RU-RYA) 🏴󠁲󠁵󠁯󠁭󠁳󠁿 Flag for Omsk (RU-OMS) 🏴󠁲󠁵󠁰󠁥󠁲󠁿 Flag for Perm Krai (RU-PER) 🏴󠁲󠁵󠁶󠁯󠁲󠁿 Flag for Voronezh (RU-VOR) 🏴󠁲󠁵󠁮󠁧󠁲󠁿 Flag for Novgorod (RU-NGR) 🏴󠁲󠁵󠁴󠁡󠁭󠁿 Flag for Tambov (RU-TAM) 🏴󠁲󠁵󠁴󠁹󠁿 Flag for Tuva (RU-TY) 🏴󠁲󠁵󠁲󠁯󠁳󠁿 Flag for Rostov (RU-ROS) 🏴󠁲󠁵󠁭󠁵󠁲󠁿 Flag for Murmansk (RU-MUR) 🏴󠁲󠁷󠀰󠀱󠁿 Flag 
for Kigali (RW-01) 🏴󠁳󠁣󠀰󠀳󠁿 Flag for Anse Etoile (SC-03) 🏴󠁳󠁢󠁩󠁳󠁿 Flag for Isabel (SB-IS) 🏴󠁳󠁣󠀰󠀲󠁿 Flag for Anse Boileau (SC-02) 🏴󠁳󠁡󠀰󠀷󠁿 Flag for Tabuk (SA-07) 🏴󠁳󠁢󠁧󠁵󠁿 Flag for Guadalcanal (SB-GU) 🏴󠁲󠁷󠀰󠀳󠁿 Flag for Northern (RW-03) 🏴󠁲󠁷󠀰󠀵󠁿 Flag for Southern (RW-05) 🏴󠁳󠁢󠁣󠁥󠁿 Flag for Central (SB-CE) 🏴󠁳󠁡󠀰󠀶󠁿 Flag for Ha’il (SA-06) 🏴󠁳󠁣󠀰󠀹󠁿 Flag for Bel Air (SC-09) 🏴󠁳󠁢󠁭󠁬󠁿 Flag for Malaita (SB-ML) 🏴󠁳󠁡󠀱󠀰󠁿 Flag for Najran (SA-10) 🏴󠁳󠁡󠀱󠀲󠁿 Flag for Al Jawf (SA-12) 🏴󠁳󠁢󠁣󠁴󠁿 Flag for Honiara (SB-CT) 🏴󠁳󠁢󠁷󠁥󠁿 Flag for Western (SB-WE) 🏴󠁳󠁡󠀰󠀸󠁿 Flag for Northern Borders (SA-08) 🏴󠁳󠁡󠀰󠀱󠁿 Flag for Riyadh (SA-01) 🏴󠁳󠁢󠁲󠁢󠁿 Flag for Rennell and Bellona (SB-RB) 🏴󠁳󠁣󠀰󠀴󠁿 Flag for Au Cap (SC-04) 🏴󠁲󠁷󠀰󠀲󠁿 Flag for Eastern (RW-02) 🏴󠁳󠁣󠀰󠀵󠁿 Flag for Anse Royale (SC-05) 🏴󠁲󠁵󠁹󠁥󠁶󠁿 Flag for Jewish (RU-YEV) 🏴󠁳󠁣󠀱󠀰󠁿 Flag for Bel Ombre (SC-10) 🏴󠁳󠁡󠀰󠀵󠁿 Flag for Al-Qassim (SA-05) 🏴󠁳󠁢󠁴󠁥󠁿 Flag for Temotu (SB-TE) 🏴󠁳󠁣󠀰󠀷󠁿 Flag for Baie Sainte Anne (SC-07) 🏴󠁳󠁢󠁣󠁨󠁿 Flag for Choiseul (SB-CH) 🏴󠁲󠁷󠀰󠀴󠁿 Flag for Western (RW-04) 🏴󠁳󠁢󠁭󠁫󠁿 Flag for Makira-Ulawa (SB-MK) 🏴󠁳󠁡󠀰󠀲󠁿 Flag for Makkah (SA-02) 🏴󠁳󠁡󠀰󠀹󠁿 Flag for Jizan (SA-09) 🏴󠁳󠁣󠀰󠀱󠁿 Flag for Anse aux Pins (SC-01) 🏴󠁳󠁡󠀰󠀴󠁿 Flag for Eastern (SA-04) 🏴󠁳󠁡󠀱󠀴󠁿 Flag for Asir (SA-14) 🏴󠁲󠁵󠁺󠁡󠁢󠁿 Flag for Zabaykalsky Krai (RU-ZAB) 🏴󠁳󠁣󠀰󠀸󠁿 Flag for Beau Vallon (SC-08) 🏴󠁳󠁡󠀰󠀳󠁿 Flag for Al Madinah (SA-03) 🏴󠁳󠁣󠀰󠀶󠁿 Flag for Baie Lazare (SC-06) 🏴󠁳󠁣󠀱󠀹󠁿 Flag for Plaisance (SC-19) 🏴󠁳󠁥󠁤󠁿 Flag for Södermanland (SE-D) 🏴󠁳󠁣󠀱󠀶󠁿 Flag for La Rivière Anglaise (SC-16) 🏴󠁳󠁣󠀲󠀲󠁿 Flag for Saint Louis (SC-22) 🏴󠁳󠁣󠀱󠀸󠁿 Flag for Mont Fleuri (SC-18) 🏴󠁳󠁤󠁮󠁯󠁿 Flag for Northern (SD-NO) 🏴󠁳󠁣󠀱󠀳󠁿 Flag for Grand’Anse Mahé (SC-13) 🏴󠁳󠁣󠀲󠀳󠁿 Flag for Takamaka (SC-23) 🏴󠁳󠁤󠁤󠁷󠁿 Flag for West Darfur (SD-DW) 🏴󠁳󠁤󠁧󠁤󠁿 Flag for Al Qadarif (SD-GD) 🏴󠁳󠁤󠁤󠁳󠁿 Flag for South Darfur (SD-DS) 🏴󠁳󠁤󠁮󠁲󠁿 Flag for River Nile (SD-NR) 🏴󠁳󠁤󠁧󠁫󠁿 Flag for West Kurdufan (SD-GK) 🏴󠁳󠁤󠁫󠁡󠁿 Flag for Kassala (SD-KA) 🏴󠁳󠁤󠁫󠁨󠁿 Flag for Khartoum (SD-KH) 🏴󠁳󠁣󠀱󠀵󠁿 Flag for La Digue (SC-15) 🏴󠁳󠁣󠀲󠀴󠁿 Flag for Les Mamelles (SC-24) 🏴󠁳󠁣󠀲󠀱󠁿 Flag for Port Glaud (SC-21) 🏴󠁳󠁥󠁡󠁣󠁿 Flag for Västerbotten (SE-AC) 🏴󠁳󠁥󠁦󠁿 Flag 
for Jönköping (SE-F) 🏴󠁳󠁥󠁡󠁢󠁿 Flag for Stockholm (SE-AB) 🏴󠁳󠁣󠀱󠀲󠁿 Flag for Glacis (SC-12) 🏴󠁳󠁣󠀲󠀰󠁿 Flag for Pointe La Rue (SC-20) 🏴󠁳󠁤󠁮󠁷󠁿 Flag for White Nile (SD-NW) 🏴󠁳󠁤󠁧󠁺󠁿 Flag for Al Jazirah (SD-GZ) 🏴󠁳󠁥󠁥󠁿 Flag for Östergötland (SE-E) 🏴󠁳󠁥󠁢󠁤󠁿 Flag for Norrbotten (SE-BD) 🏴󠁳󠁥󠁣󠁿 Flag for Uppsala (SE-C) 🏴󠁳󠁣󠀱󠀷󠁿 Flag for Mont Buxton (SC-17) 🏴󠁳󠁣󠀱󠀴󠁿 Flag for Grand’Anse Praslin (SC-14) 🏴󠁳󠁤󠁫󠁳󠁿 Flag for South Kurdufan (SD-KS) 🏴󠁳󠁣󠀱󠀱󠁿 Flag for Cascade (SC-11) 🏴󠁳󠁤󠁫󠁮󠁿 Flag for North Kurdufan (SD-KN) 🏴󠁳󠁤󠁳󠁩󠁿 Flag for Sennar (SD-SI) 🏴󠁳󠁤󠁤󠁥󠁿 Flag for East Darfur (SD-DE) 🏴󠁳󠁤󠁮󠁢󠁿 Flag for Blue Nile (SD-NB) 🏴󠁳󠁤󠁤󠁮󠁿 Flag for North Darfur (SD-DN) 🏴󠁳󠁤󠁤󠁣󠁿 Flag for Central Darfur (SD-DC) 🏴󠁳󠁥󠁵󠁿 Flag for Västmanland (SE-U) 🏴󠁳󠁥󠁳󠁿 Flag for Värmland (SE-S) 🏴󠁳󠁩󠀰󠀱󠀷󠁿 Flag for Črnomelj (SI-017) 🏴󠁳󠁥󠁹󠁿 Flag for Västernorrland (SE-Y) 🏴󠁳󠁧󠀰󠀵󠁿 Flag for South West (SG-05) 🏴󠁳󠁩󠀰󠀱󠀶󠁿 Flag for Črna na Koroškem (SI-016) 🏴󠁳󠁥󠁯󠁿 Flag for Västra Götaland (SE-O) 🏴󠁳󠁥󠁸󠁿 Flag for Gävleborg (SE-X) 🏴󠁳󠁧󠀰󠀲󠁿 Flag for North East (SG-02) 🏴󠁳󠁩󠀰󠀰󠀷󠁿 Flag for Brda (SI-007) 🏴󠁳󠁥󠁨󠁿 Flag for Kalmar (SE-H) 🏴󠁳󠁩󠀰󠀱󠀸󠁿 Flag for Destrnik (SI-018) 🏴󠁳󠁩󠀰󠀰󠀲󠁿 Flag for Beltinci (SI-002) 🏴󠁳󠁩󠀰󠀰󠀴󠁿 Flag for Bohinj (SI-004) 🏴󠁳󠁩󠀰󠀰󠀹󠁿 Flag for Brežice (SI-009) 🏴󠁳󠁧󠀰󠀳󠁿 Flag for North West (SG-03) 🏴󠁳󠁨󠁡󠁣󠁿 Flag for Ascension Island (SH-AC) 👩🏽‍👦🏽‍👶🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁳󠁩󠀰󠀱󠀲󠁿 Flag for Cerklje na Gorenjskem (SI-012) 🏴󠁳󠁩󠀰󠀱󠀳󠁿 Flag for Cerknica (SI-013) 🏴󠁳󠁩󠀰󠀰󠀶󠁿 Flag for Bovec (SI-006) 🏴󠁳󠁩󠀰󠀱󠀵󠁿 Flag for Črenšovci (SI-015) 🏴󠁳󠁥󠁧󠁿 Flag for Kronoberg (SE-G) 🏴󠁳󠁩󠀰󠀰󠀱󠁿 Flag for Ajdovščina (SI-001) 🏴󠁳󠁩󠀰󠀱󠀰󠁿 Flag for Tišina (SI-010) 🏴󠁳󠁧󠀰󠀴󠁿 Flag for South East (SG-04) 🏴󠁳󠁩󠀰󠀰󠀸󠁿 Flag for Brezovica (SI-008) 🏴󠁳󠁨󠁨󠁬󠁿 Flag for Saint Helena (SH-HL) 🏴󠁳󠁥󠁺󠁿 Flag for Jämtland (SE-Z) 🏴󠁳󠁥󠁩󠁿 Flag for Gotland (SE-I) 🏴󠁳󠁥󠁷󠁿 Flag for Dalarna (SE-W) 🏴󠁳󠁥󠁫󠁿 Flag for Blekinge (SE-K) 🏴󠁳󠁩󠀰󠀰󠀵󠁿 Flag for Borovnica (SI-005) 🏴󠁳󠁨󠁴󠁡󠁿 Flag for Tristan da Cunha (SH-TA) 🏴󠁳󠁩󠀰󠀰󠀳󠁿 Flag for Bled (SI-003) 🏴󠁳󠁩󠀰󠀱󠀴󠁿 Flag for Cerkno (SI-014) 🏴󠁳󠁥󠁴󠁿 Flag for Örebro (SE-T) 
🏴󠁳󠁩󠀰󠀲󠀳󠁿 Flag for Domžale (SI-023) 🏴󠁳󠁩󠀰󠀴󠀰󠁿 Flag for Izola (SI-040) 🏴󠁳󠁩󠀰󠀵󠀶󠁿 Flag for Kuzma (SI-056) 🏴󠁳󠁩󠀰󠀲󠀵󠁿 Flag for Dravograd (SI-025) 🏴󠁳󠁩󠀰󠀲󠀶󠁿 Flag for Duplek (SI-026) 🏴󠁳󠁩󠀰󠀴󠀱󠁿 Flag for Jesenice (SI-041) 🏴󠁳󠁩󠀰󠀲󠀸󠁿 Flag for Gorišnica (SI-028) 🏴󠁳󠁩󠀰󠀲󠀹󠁿 Flag for Gornja Radgona (SI-029) 🏴󠁳󠁩󠀰󠀲󠀰󠁿 Flag for Dobrepolje (SI-020) 🏴󠁳󠁩󠀰󠀳󠀱󠁿 Flag for Gornji Petrovci (SI-031) 🏴󠁳󠁩󠀰󠀲󠀴󠁿 Flag for Dornava (SI-024) 🏴󠁳󠁩󠀰󠀳󠀴󠁿 Flag for Hrastnik (SI-034) 🏴󠁳󠁩󠀰󠀳󠀹󠁿 Flag for Ivančna Gorica (SI-039) 🏴󠁳󠁩󠀰󠀴󠀹󠁿 Flag for Komen (SI-049) 🏴󠁳󠁩󠀰󠀵󠀱󠁿 Flag for Kozje (SI-051) 🏴󠁳󠁩󠀰󠀱󠀹󠁿 Flag for Divača (SI-019) 🏴󠁳󠁩󠀰󠀳󠀶󠁿 Flag for Idrija (SI-036) 🏴󠁳󠁩󠀰󠀴󠀵󠁿 Flag for Kidričevo (SI-045) 🏴󠁳󠁩󠀰󠀴󠀶󠁿 Flag for Kobarid (SI-046) 🏴󠁳󠁩󠀰󠀴󠀷󠁿 Flag for Kobilje (SI-047) 🏴󠁳󠁩󠀰󠀵󠀰󠁿 Flag for Koper (SI-050) 🏴󠁳󠁩󠀰󠀳󠀷󠁿 Flag for Ig (SI-037) 🏴󠁳󠁩󠀰󠀵󠀵󠁿 Flag for Kungota (SI-055) 🏴󠁳󠁩󠀰󠀳󠀲󠁿 Flag for Grosuplje (SI-032) 🏴󠁳󠁩󠀰󠀲󠀱󠁿 Flag for Dobrova–Polhov Gradec (SI-021) 🏴󠁳󠁩󠀰󠀴󠀲󠁿 Flag for Juršinci (SI-042) 🏴󠁳󠁩󠀰󠀵󠀴󠁿 Flag for Krško (SI-054) 🏴󠁳󠁩󠀰󠀳󠀳󠁿 Flag for Šalovci (SI-033) 🏴󠁳󠁩󠀰󠀵󠀳󠁿 Flag for Kranjska Gora (SI-053) 🏴󠁳󠁩󠀰󠀴󠀸󠁿 Flag for Kočevje (SI-048) 🏴󠁳󠁩󠀰󠀳󠀸󠁿 Flag for Ilirska Bistrica (SI-038) 🏴󠁳󠁩󠀰󠀴󠀳󠁿 Flag for Kamnik (SI-043) 🏴󠁳󠁩󠀰󠀳󠀵󠁿 Flag for Hrpelje–Kozina (SI-035) 🏴󠁳󠁩󠀰󠀳󠀰󠁿 Flag for Gornji Grad (SI-030) 🏴󠁳󠁩󠀰󠀴󠀴󠁿 Flag for Kanal (SI-044) 🏴󠁳󠁩󠀰󠀲󠀲󠁿 Flag for Dol pri Ljubljani (SI-022) 🏴󠁳󠁩󠀰󠀸󠀹󠁿 Flag for Pesnica (SI-089) 🏴󠁳󠁩󠀰󠀹󠀰󠁿 Flag for Piran (SI-090) 🏴󠁳󠁩󠀰󠀷󠀴󠁿 Flag for Mežica (SI-074) 🏴󠁳󠁩󠀰󠀸󠀱󠁿 Flag for Muta (SI-081) 🏴󠁳󠁩󠀰󠀶󠀲󠁿 Flag for Ljubno (SI-062) 🏴󠁳󠁩󠀰󠀸󠀷󠁿 Flag for Ormož (SI-087) 🏴󠁳󠁩󠀰󠀹󠀴󠁿 Flag for Postojna (SI-094) 🏴󠁳󠁩󠀰󠀷󠀶󠁿 Flag for Mislinja (SI-076) 👩🏾‍👦🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁳󠁩󠀰󠀶󠀹󠁿 Flag for Majšperk (SI-069) 🏴󠁳󠁩󠀰󠀷󠀲󠁿 Flag for Mengeš (SI-072) 🏴󠁳󠁩󠀰󠀷󠀳󠁿 Flag for Metlika (SI-073) 🏴󠁳󠁩󠀰󠀷󠀷󠁿 Flag for Moravče (SI-077) 🏴󠁳󠁩󠀰󠀷󠀸󠁿 Flag for Moravske Toplice (SI-078) 🏴󠁳󠁩󠀰󠀶󠀱󠁿 Flag for Ljubljana (SI-061) 🏴󠁳󠁩󠀰󠀸󠀰󠁿 Flag for Murska Sobota (SI-080) 🏴󠁳󠁩󠀰󠀸󠀲󠁿 Flag for Naklo (SI-082) 🏴󠁳󠁩󠀰󠀸󠀴󠁿 Flag for Nova Gorica (SI-084) 🏴󠁳󠁩󠀰󠀸󠀸󠁿 Flag for 
Osilnica (SI-088) 🏴󠁳󠁩󠀰󠀹󠀱󠁿 Flag for Pivka (SI-091) 🏴󠁳󠁩󠀰󠀸󠀳󠁿 Flag for Nazarje (SI-083) 🏴󠁳󠁩󠀰󠀷󠀵󠁿 Flag for Miren–Kostanjevica (SI-075) 🏴󠁳󠁩󠀰󠀶󠀴󠁿 Flag for Logatec (SI-064) 🏴󠁳󠁩󠀰󠀶󠀰󠁿 Flag for Litija (SI-060) 🏴󠁳󠁩󠀰󠀷󠀰󠁿 Flag for Maribor (SI-070) 🏴󠁳󠁩󠀰󠀶󠀳󠁿 Flag for Ljutomer (SI-063) 🏴󠁳󠁩󠀰󠀶󠀶󠁿 Flag for Loški Potok (SI-066) 🏴󠁳󠁩󠀰󠀶󠀷󠁿 Flag for Luče (SI-067) 🏴󠁳󠁩󠀰󠀹󠀲󠁿 Flag for Podčetrtek (SI-092) 🏴󠁳󠁩󠀰󠀹󠀳󠁿 Flag for Podvelka (SI-093) 🏴󠁳󠁩󠀰󠀷󠀱󠁿 Flag for Medvode (SI-071) 🏴󠁳󠁩󠀰󠀶󠀵󠁿 Flag for Loška Dolina (SI-065) 🏴󠁳󠁩󠀰󠀵󠀷󠁿 Flag for Laško (SI-057) 🏴󠁳󠁩󠀰󠀵󠀹󠁿 Flag for Lendava (SI-059) 🏴󠁳󠁩󠀰󠀷󠀹󠁿 Flag for Mozirje (SI-079) 🏴󠁳󠁩󠀰󠀶󠀸󠁿 Flag for Lukovica (SI-068) 🏴󠁳󠁩󠀱󠀳󠀱󠁿 Flag for Tržič (SI-131) 🏴󠁳󠁩󠀱󠀱󠀸󠁿 Flag for Šentilj (SI-118) 🏴󠁳󠁩󠀰󠀹󠀸󠁿 Flag for Rače–Fram (SI-098) 🏴󠁳󠁩󠀰󠀹󠀷󠁿 Flag for Puconci (SI-097) 👩🏿‍👦🏿‍👶🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁳󠁩󠀱󠀰󠀵󠁿 Flag for Rogašovci (SI-105) 🏴󠁳󠁩󠀱󠀱󠀳󠁿 Flag for Slovenska Bistrica (SI-113) 🏴󠁳󠁩󠀱󠀰󠀷󠁿 Flag for Rogatec (SI-107) 🏴󠁳󠁩󠀰󠀹󠀶󠁿 Flag for Ptuj (SI-096) 🏴󠁳󠁩󠀱󠀱󠀹󠁿 Flag for Šentjernej (SI-119) 🏴󠁳󠁩󠀱󠀱󠀱󠁿 Flag for Sežana (SI-111) 🏴󠁳󠁩󠀱󠀲󠀳󠁿 Flag for Škofljica (SI-123) 🏴󠁳󠁩󠀱󠀱󠀲󠁿 Flag for Slovenj Gradec (SI-112) 🏴󠁳󠁩󠀱󠀱󠀵󠁿 Flag for Starše (SI-115) 🏴󠁳󠁩󠀱󠀱󠀶󠁿 Flag for Sveti Jurij (SI-116) 🏴󠁳󠁩󠀱󠀳󠀰󠁿 Flag for Trebnje (SI-130) 🏴󠁳󠁩󠀱󠀱󠀰󠁿 Flag for Sevnica (SI-110) 🏴󠁳󠁩󠀰󠀹󠀹󠁿 Flag for Radeče (SI-099) 🏴󠁳󠁩󠀱󠀲󠀱󠁿 Flag for Škocjan (SI-121) 🏴󠁳󠁩󠀱󠀲󠀴󠁿 Flag for Šmarje pri Jelšah (SI-124) 🏴󠁳󠁩󠀱󠀲󠀶󠁿 Flag for Šoštanj (SI-126) 🏴󠁳󠁩󠀱󠀲󠀷󠁿 Flag for Štore (SI-127) 🏴󠁳󠁩󠀱󠀰󠀶󠁿 Flag for Rogaška Slatina (SI-106) 🏴󠁳󠁩󠀰󠀹󠀵󠁿 Flag for Preddvor (SI-095) 🏴󠁳󠁩󠀱󠀳󠀲󠁿 Flag for Turnišče (SI-132) 🏴󠁳󠁩󠀱󠀰󠀲󠁿 Flag for Radovljica (SI-102) 🏴󠁳󠁩󠀱󠀰󠀸󠁿 Flag for Ruše (SI-108) 🏴󠁳󠁩󠀱󠀱󠀴󠁿 Flag for Slovenske Konjice (SI-114) 🏴󠁳󠁩󠀱󠀲󠀰󠁿 Flag for Šentjur (SI-120) 🏴󠁳󠁩󠀱󠀲󠀸󠁿 Flag for Tolmin (SI-128) 🏴󠁳󠁩󠀱󠀰󠀴󠁿 Flag for Ribnica (SI-104) 🏴󠁳󠁩󠀱󠀰󠀱󠁿 Flag for Radlje ob Dravi (SI-101) 🏴󠁳󠁩󠀱󠀲󠀹󠁿 Flag for Trbovlje (SI-129) 🏴󠁳󠁩󠀱󠀰󠀹󠁿 Flag for Semič (SI-109) 🏴󠁳󠁩󠀱󠀱󠀷󠁿 Flag for Šenčur (SI-117) 🏴󠁳󠁩󠀱󠀰󠀳󠁿 Flag for Ravne na Koroškem (SI-103) 🏴󠁳󠁩󠀱󠀶󠀹󠁿 Flag for Miklavž na Dravskem Polju 
(SI-169) 🏴󠁳󠁩󠀱󠀳󠀸󠁿 Flag for Vodice (SI-138) 🏴󠁳󠁩󠀱󠀳󠀳󠁿 Flag for Velenje (SI-133) 🏴󠁳󠁩󠀱󠀴󠀲󠁿 Flag for Zagorje ob Savi (SI-142) 🏴󠁳󠁩󠀱󠀴󠀱󠁿 Flag for Vuzenica (SI-141) 🏴󠁳󠁩󠀱󠀴󠀰󠁿 Flag for Vrhnika (SI-140) 👩🏻‍👧🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone 🏴󠁳󠁩󠀱󠀴󠀶󠁿 Flag for Železniki (SI-146) 🏴󠁳󠁩󠀱󠀴󠀷󠁿 Flag for Žiri (SI-147) 🏴󠁳󠁩󠀱󠀴󠀸󠁿 Flag for Benedikt (SI-148) 🏴󠁳󠁩󠀱󠀳󠀴󠁿 Flag for Velike Lašče (SI-134) 🏴󠁳󠁩󠀱󠀳󠀷󠁿 Flag for Vitanje (SI-137) 🏴󠁳󠁩󠀱󠀶󠀴󠁿 Flag for Komenda (SI-164) 🏴󠁳󠁩󠀱󠀵󠀵󠁿 Flag for Dobrna (SI-155) 🏴󠁳󠁩󠀱󠀵󠀶󠁿 Flag for Dobrovnik (SI-156) 🏴󠁳󠁩󠀱󠀵󠀷󠁿 Flag for Dolenjske Toplice (SI-157) 🏴󠁳󠁩󠀱󠀵󠀹󠁿 Flag for Hajdina (SI-159) 🏴󠁳󠁩󠀱󠀷󠀱󠁿 Flag for Oplotnica (SI-171) 🏴󠁳󠁩󠀱󠀳󠀵󠁿 Flag for Videm (SI-135) 🏴󠁳󠁩󠀱󠀶󠀳󠁿 Flag for Jezersko (SI-163) 🏴󠁳󠁩󠀱󠀵󠀲󠁿 Flag for Cankova (SI-152) 🏴󠁳󠁩󠀱󠀶󠀵󠁿 Flag for Kostel (SI-165) 🏴󠁳󠁩󠀱󠀶󠀶󠁿 Flag for Križevci (SI-166) 🏴󠁳󠁩󠀱󠀳󠀹󠁿 Flag for Vojnik (SI-139) 🏴󠁳󠁩󠀱󠀶󠀸󠁿 Flag for Markovci (SI-168) 🏴󠁳󠁩󠀱󠀷󠀰󠁿 Flag for Mirna Peč (SI-170) 🏴󠁳󠁩󠀱󠀳󠀶󠁿 Flag for Vipava (SI-136) 🏴󠁳󠁩󠀱󠀶󠀲󠁿 Flag for Horjul (SI-162) 🏴󠁳󠁩󠀱󠀵󠀳󠁿 Flag for Cerkvenjak (SI-153) 🏴󠁳󠁩󠀱󠀵󠀰󠁿 Flag for Bloke (SI-150) 🏴󠁳󠁩󠀱󠀴󠀳󠁿 Flag for Zavrč (SI-143) 🏴󠁳󠁩󠀱󠀴󠀹󠁿 Flag for Bistrica ob Sotli (SI-149) 🏴󠁳󠁩󠀱󠀴󠀴󠁿 Flag for Zreče (SI-144) 🏴󠁳󠁩󠀱󠀶󠀱󠁿 Flag for Hodoš (SI-161) 🏴󠁳󠁩󠀱󠀶󠀰󠁿 Flag for Hoče–Slivnica (SI-160) 🏴󠁳󠁩󠀱󠀵󠀸󠁿 Flag for Grad (SI-158) 🏴󠁳󠁩󠀱󠀷󠀲󠁿 Flag for Podlehnik (SI-172) 🏴󠁳󠁩󠀱󠀹󠀶󠁿 Flag for Cirkulane (SI-196) 🏴󠁳󠁩󠀱󠀷󠀴󠁿 Flag for Prebold (SI-174) 🏴󠁳󠁩󠀱󠀷󠀶󠁿 Flag for Razkrižje (SI-176) 🏴󠁳󠁩󠀱󠀸󠀸󠁿 Flag for Veržej (SI-188) 🏴󠁳󠁩󠀱󠀹󠀰󠁿 Flag for Žalec (SI-190) 🏴󠁳󠁩󠀱󠀸󠀰󠁿 Flag for Solčava (SI-180) 🏴󠁳󠁩󠀱󠀸󠀱󠁿 Flag for Sveta Ana (SI-181) 🏴󠁳󠁩󠀱󠀸󠀳󠁿 Flag for Šempeter–Vrtojba (SI-183) 🏴󠁳󠁩󠀱󠀸󠀵󠁿 Flag for Trnovska Vas (SI-185) 🏴󠁳󠁩󠀱󠀷󠀹󠁿 Flag for Sodražica (SI-179) 🏴󠁳󠁩󠀱󠀹󠀸󠁿 Flag for Makole (SI-198) 🏴󠁳󠁩󠀲󠀰󠀳󠁿 Flag for Straža (SI-203) 🏴󠁳󠁩󠀱󠀷󠀸󠁿 Flag for Selnica ob Dravi (SI-178) 🏴󠁳󠁩󠀱󠀹󠀳󠁿 Flag for Žužemberk (SI-193) 🏴󠁳󠁩󠀱󠀹󠀷󠁿 Flag for Kostanjevica na Krki (SI-197) 🏴󠁳󠁩󠀱󠀷󠀵󠁿 Flag for Prevalje (SI-175) 🏴󠁳󠁩󠀱󠀹󠀴󠁿 Flag for Šmartno pri Litiji (SI-194) 🏴󠁳󠁩󠀱󠀹󠀱󠁿 Flag for Žetale (SI-191) 🏴󠁳󠁩󠀱󠀸󠀹󠁿 Flag for Vransko (SI-189) 
🏴󠁳󠁩󠀲󠀰󠀱󠁿 Flag for Renče–Vogrsko (SI-201) 🏴󠁳󠁩󠀲󠀰󠀲󠁿 Flag for Središče ob Dravi (SI-202) 🏴󠁳󠁩󠀱󠀸󠀶󠁿 Flag for Trzin (SI-186) 🏴󠁳󠁩󠀲󠀰󠀴󠁿 Flag for Sveta Trojica v Slovenskih Goricah (SI-204) 🏴󠁳󠁩󠀲󠀰󠀵󠁿 Flag for Sveti Tomaž (SI-205) 🏴󠁳󠁩󠀱󠀷󠀷󠁿 Flag for Ribnica na Pohorju (SI-177) 🏴󠁳󠁩󠀲󠀰󠀷󠁿 Flag for Gorje (SI-207) 🏴󠁳󠁩󠀱󠀸󠀴󠁿 Flag for Tabor (SI-184) 🏴󠁳󠁩󠀱󠀹󠀹󠁿 Flag for Mokronog–Trebelno (SI-199) 🏴󠁳󠁩󠀱󠀷󠀳󠁿 Flag for Polzela (SI-173) 🏴󠁳󠁩󠀲󠀰󠀰󠁿 Flag for Poljčane (SI-200) 🏴󠁳󠁩󠀱󠀹󠀵󠁿 Flag for Apače (SI-195) 🏴󠁳󠁩󠀱󠀸󠀷󠁿 Flag for Velika Polana (SI-187) 🏴󠁳󠁫󠁴󠁡󠁿 Flag for Trnava (SK-TA) 🏴󠁳󠁩󠀲󠀰󠀹󠁿 Flag for Rečica ob Savinji (SI-209) 🏴󠁳󠁭󠀰󠀹󠁿 Flag for Serravalle (SM-09) 🏴󠁳󠁭󠀰󠀲󠁿 Flag for Chiesanuova (SM-02) 🏴󠁳󠁮󠁫󠁡󠁿 Flag for Kaffrine (SN-KA) 🏴󠁳󠁫󠁮󠁩󠁿 Flag for Nitra (SK-NI) 🏴󠁳󠁩󠀲󠀱󠀱󠁿 Flag for Šentrupert (SI-211) 🏴󠁳󠁭󠀰󠀶󠁿 Flag for Borgo Maggiore (SM-06) 🏴󠁳󠁫󠁫󠁩󠁿 Flag for Košice (SK-KI) 🏴󠁳󠁫󠁢󠁣󠁿 Flag for Banská Bystrica (SK-BC) 🏴󠁳󠁭󠀰󠀸󠁿 Flag for Montegiardino (SM-08) 🏴󠁳󠁮󠁤󠁫󠁿 Flag for Dakar (SN-DK) 🏴󠁳󠁫󠁰󠁶󠁿 Flag for Prešov (SK-PV) 🏴󠁳󠁩󠀲󠀱󠀲󠁿 Flag for Mirna (SI-212) 🏴󠁳󠁭󠀰󠀵󠁿 Flag for Fiorentino (SM-05) 🏴󠁳󠁮󠁴󠁨󠁿 Flag for Thiès (SN-TH) 🏴󠁳󠁩󠀲󠀱󠀳󠁿 Flag for Ankaran (SI-213) 🏴󠁳󠁮󠁴󠁣󠁿 Flag for Tambacounda (SN-TC) 🏴󠁳󠁮󠁦󠁫󠁿 Flag for Fatick (SN-FK) 🏴󠁳󠁫󠁴󠁣󠁿 Flag for Trenčín (SK-TC) 🏴󠁳󠁮󠁫󠁬󠁿 Flag for Kaolack (SN-KL) 🏴󠁳󠁭󠀰󠀴󠁿 Flag for Faetano (SM-04) 🏴󠁳󠁫󠁺󠁩󠁿 Flag for Žilina (SK-ZI) 🏴󠁳󠁬󠁳󠁿 Flag for Southern (SL-S) 🏴󠁳󠁮󠁳󠁥󠁿 Flag for Sédhiou (SN-SE) 🏴󠁳󠁫󠁢󠁬󠁿 Flag for Bratislava (SK-BL) 🏴󠁳󠁮󠁤󠁢󠁿 Flag for Diourbel (SN-DB) 🏴󠁳󠁮󠁫󠁥󠁿 Flag for Kédougou (SN-KE) 🏴󠁳󠁬󠁮󠁿 Flag for Northern (SL-N) 🏴󠁳󠁬󠁷󠁿 Flag for Western Area (SL-W) 🏴󠁳󠁮󠁭󠁴󠁿 Flag for Matam (SN-MT) 🏴󠁳󠁬󠁥󠁿 Flag for Eastern (SL-E) 🏴󠁳󠁭󠀰󠀱󠁿 Flag for Acquaviva (SM-01) 🏴󠁳󠁮󠁫󠁤󠁿 Flag for Kolda (SN-KD) 🏴󠁳󠁮󠁳󠁬󠁿 Flag for Saint-Louis (SN-SL) 🏴󠁳󠁭󠀰󠀷󠁿 Flag for San Marino (SM-07) 🏴󠁳󠁮󠁬󠁧󠁿 Flag for Louga (SN-LG) 🏴󠁳󠁭󠀰󠀳󠁿 Flag for Domagnano (SM-03) 🏴󠁳󠁳󠁥󠁥󠁿 Flag for Eastern Equatoria (SS-EE) 🏴󠁳󠁲󠁳󠁡󠁿 Flag for Saramacca (SR-SA) 👩🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁳󠁲󠁭󠁡󠁿 Flag for Marowijne (SR-MA) 🏴󠁳󠁯󠁪󠁤󠁿 Flag for Middle Juba (SO-JD) 
🏴󠁳󠁯󠁭󠁵󠁿 Flag for Mudug (SO-MU) 🏴󠁳󠁯󠁳󠁨󠁿 Flag for Lower Shebelle (SO-SH) 🏴󠁳󠁯󠁨󠁩󠁿 Flag for Hiran (SO-HI) 🏴󠁳󠁳󠁥󠁣󠁿 Flag for Central Equatoria (SS-EC) 🏴󠁳󠁮󠁺󠁧󠁿 Flag for Ziguinchor (SN-ZG) 🏴󠁳󠁲󠁣󠁲󠁿 Flag for Coronie (SR-CR) 🏴󠁳󠁯󠁳󠁤󠁿 Flag for Middle Shebelle (SO-SD) 🏴󠁳󠁳󠁮󠁵󠁿 Flag for Upper Nile (SS-NU) 🏴󠁳󠁲󠁷󠁡󠁿 Flag for Wanica (SR-WA) 🏴󠁳󠁯󠁡󠁷󠁿 Flag for Awdal (SO-AW) 🏴󠁳󠁯󠁳󠁡󠁿 Flag for Sanaag (SO-SA) 🏴󠁳󠁯󠁪󠁨󠁿 Flag for Lower Juba (SO-JH) 🏴󠁳󠁳󠁬󠁫󠁿 Flag for Lakes (SS-LK) 🏴󠁳󠁳󠁷󠁲󠁿 Flag for Warrap (SS-WR) 🏴󠁳󠁴󠁰󠁿 Flag for Príncipe (ST-P) 🏴󠁳󠁲󠁳󠁩󠁿 Flag for Sipaliwini (SR-SI) 🏴󠁳󠁳󠁢󠁷󠁿 Flag for Western Bahr el Ghazal (SS-BW) 🏴󠁳󠁳󠁥󠁷󠁿 Flag for Western Equatoria (SS-EW) 🏴󠁳󠁯󠁢󠁲󠁿 Flag for Bari (SO-BR) 🏴󠁳󠁳󠁪󠁧󠁿 Flag for Jonglei (SS-JG) 🏴󠁳󠁲󠁰󠁭󠁿 Flag for Paramaribo (SR-PM) 🏴󠁳󠁲󠁣󠁭󠁿 Flag for Commewijne (SR-CM) 🏴󠁳󠁯󠁧󠁡󠁿 Flag for Galguduud (SO-GA) 🏴󠁳󠁲󠁮󠁩󠁿 Flag for Nickerie (SR-NI) 🏴󠁳󠁲󠁰󠁲󠁿 Flag for Para (SR-PR) 🏴󠁳󠁯󠁷󠁯󠁿 Flag for Woqooyi Galbeed (SO-WO) 🏴󠁳󠁯󠁧󠁥󠁿 Flag for Gedo (SO-GE) 🏴󠁳󠁯󠁢󠁹󠁿 Flag for Bay, Somalia (SO-BY) 🏴󠁳󠁲󠁢󠁲󠁿 Flag for Brokopondo (SR-BR) 🏴󠁳󠁯󠁮󠁵󠁿 Flag for Nugal (SO-NU) 🏴󠁳󠁯󠁴󠁯󠁿 Flag for Togdheer (SO-TO) 🏴󠁳󠁯󠁢󠁫󠁿 Flag for Bakool (SO-BK) 🏴󠁳󠁯󠁳󠁯󠁿 Flag for Sool (SO-SO) 🏴󠁳󠁺󠁨󠁨󠁿 Flag for Hhohho (SZ-HH) 🏴󠁴󠁤󠁥󠁯󠁿 Flag for Ennedi-Ouest (TD-EO) 🏴󠁴󠁤󠁧󠁲󠁿 Flag for Guéra (TD-GR) 🏴󠁳󠁺󠁳󠁨󠁿 Flag for Shiselweni (SZ-SH) 🏴󠁳󠁹󠁤󠁲󠁿 Flag for Daraa (SY-DR) 🏴󠁳󠁹󠁲󠁡󠁿 Flag for Ar-Raqqah (SY-RA) 🏴󠁳󠁶󠁳󠁯󠁿 Flag for Sonsonate (SV-SO) 🏴󠁳󠁶󠁵󠁮󠁿 Flag for La Unión (SV-UN) 🏴󠁳󠁶󠁳󠁭󠁿 Flag for San Miguel (SV-SM) 🏴󠁳󠁶󠁭󠁯󠁿 Flag for Morazán (SV-MO) 🏴󠁳󠁶󠁳󠁳󠁿 Flag for San Salvador (SV-SS) 🏴󠁳󠁹󠁤󠁹󠁿 Flag for Deir ez-Zor (SY-DY) 🏴󠁳󠁶󠁣󠁡󠁿 Flag for Cabañas (SV-CA) 🏴󠁳󠁺󠁬󠁵󠁿 Flag for Lubombo (SZ-LU) 🏴󠁳󠁶󠁣󠁨󠁿 Flag for Chalatenango (SV-CH) 🏴󠁳󠁹󠁲󠁤󠁿 Flag for Rif Dimashq (SY-RD) 🏴󠁳󠁹󠁴󠁡󠁿 Flag for Tartus (SY-TA) 🏴󠁴󠁤󠁢󠁯󠁿 Flag for Borkou (TD-BO) 🏴󠁳󠁺󠁭󠁡󠁿 Flag for Manzini (SZ-MA) 🏴󠁴󠁤󠁢󠁡󠁿 Flag for Batha (TD-BA) 🏴󠁳󠁹󠁨󠁩󠁿 Flag for Homs (SY-HI) 🏴󠁴󠁤󠁥󠁥󠁿 Flag for Ennedi-Est (TD-EE) 🏴󠁴󠁤󠁢󠁧󠁿 Flag for Bahr el Gazel (TD-BG) 🏴󠁴󠁤󠁫󠁡󠁿 Flag for Kanem (TD-KA) 🏴󠁳󠁹󠁨󠁭󠁿 Flag for Hama (SY-HM) 🏴󠁳󠁹󠁬󠁡󠁿 Flag for Latakia (SY-LA) 🏴󠁳󠁹󠁩󠁤󠁿 Flag for Idlib 
(SY-ID) 🏴󠁳󠁶󠁬󠁩󠁿 Flag for La Libertad (SV-LI) 🏴󠁳󠁹󠁨󠁬󠁿 Flag for Aleppo (SY-HL) 🏴󠁳󠁶󠁡󠁨󠁿 Flag for Ahuachapán (SV-AH) 🏴󠁴󠁤󠁣󠁢󠁿 Flag for Chari-Baguirmi (TD-CB) 🏴󠁳󠁶󠁰󠁡󠁿 Flag for La Paz (SV-PA) 🏴󠁳󠁹󠁳󠁵󠁿 Flag for As-Suwayda (SY-SU) 🏴󠁳󠁹󠁤󠁩󠁿 Flag for Damascus (SY-DI) 🏴󠁳󠁹󠁱󠁵󠁿 Flag for Quneitra (SY-QU) 🏴󠁳󠁹󠁨󠁡󠁿 Flag for Al-Hasakah (SY-HA) 🏴󠁳󠁶󠁳󠁡󠁿 Flag for Santa Ana (SV-SA) 🏴󠁳󠁶󠁣󠁵󠁿 Flag for Cuscatlán (SV-CU) 🏴󠁴󠁤󠁬󠁯󠁿 Flag for Logone Occidental (TD-LO) 🏴󠁴󠁨󠀲󠀲󠁿 Flag for Chanthaburi (TH-22) 🏴󠁴󠁤󠁭󠁥󠁿 Flag for Mayo-Kebbi Est (TD-ME) 🏴󠁴󠁤󠁭󠁣󠁿 Flag for Moyen-Chari (TD-MC) 🏴󠁴󠁤󠁬󠁲󠁿 Flag for Logone Oriental (TD-LR) 🏴󠁴󠁧󠁳󠁿 Flag for Savanes (TG-S) 🏴󠁴󠁨󠀱󠀴󠁿 Flag for Phra Nakhon Si Ayutthaya (TH-14) 🏴󠁴󠁧󠁣󠁿 Flag for Centrale (TG-C) 🏴󠁴󠁨󠀲󠀷󠁿 Flag for Sa Kaeo (TH-27) 🏴󠁴󠁨󠀱󠀲󠁿 Flag for Nonthaburi (TH-12) 🏴󠁴󠁨󠀳󠀱󠁿 Flag for Buri Ram (TH-31) 🏴󠁴󠁨󠀲󠀰󠁿 Flag for Chon Buri (TH-20) 🏴󠁴󠁤󠁳󠁩󠁿 Flag for Sila (TD-SI) 🏴󠁴󠁤󠁬󠁣󠁿 Flag for Lac (TD-LC) 🏴󠁴󠁨󠀲󠀱󠁿 Flag for Rayong (TH-21) 🏴󠁴󠁨󠀲󠀵󠁿 Flag for Prachin Buri (TH-25) 🏴󠁴󠁨󠀳󠀰󠁿 Flag for Nakhon Ratchasima (TH-30) 🏴󠁴󠁧󠁫󠁿 Flag for Kara (TG-K) 🏴󠁴󠁨󠀱󠀵󠁿 Flag for Ang Thong (TH-15) 🏴󠁴󠁨󠀱󠀰󠁿 Flag for Bangkok (TH-10) 🏴󠁴󠁤󠁭󠁡󠁿 Flag for Mandoul (TD-MA) 🏴󠁴󠁨󠀱󠀳󠁿 Flag for Pathum Thani (TH-13) 🏴󠁴󠁨󠀲󠀴󠁿 Flag for Chachoengsao (TH-24) 🏴󠁴󠁨󠀱󠀷󠁿 Flag for Sing Buri (TH-17) 🏴󠁴󠁤󠁭󠁯󠁿 Flag for Mayo-Kebbi Ouest (TD-MO) 🏴󠁴󠁤󠁯󠁤󠁿 Flag for Ouaddaï (TD-OD) 🏴󠁴󠁨󠀳󠀲󠁿 Flag for Surin (TH-32) 🏴󠁴󠁨󠀲󠀶󠁿 Flag for Nakhon Nayok (TH-26) 🏴󠁴󠁤󠁳󠁡󠁿 Flag for Salamat (TD-SA) 🏴󠁴󠁤󠁴󠁡󠁿 Flag for Tandjilé (TD-TA) 🏴󠁴󠁤󠁷󠁦󠁿 Flag for Wadi Fira (TD-WF) 🏴󠁴󠁨󠀱󠀹󠁿 Flag for Saraburi (TH-19) 🏴󠁴󠁨󠀱󠀱󠁿 Flag for Samut Prakan (TH-11) 🏴󠁴󠁤󠁴󠁩󠁿 Flag for Tibesti (TD-TI) 🏴󠁴󠁧󠁰󠁿 Flag for Plateaux (TG-P) 🏴󠁴󠁤󠁮󠁤󠁿 Flag for N’Djamena (TD-ND) 🏴󠁴󠁨󠀱󠀸󠁿 Flag for Chai Nat (TH-18) 🏴󠁴󠁨󠀶󠀲󠁿 Flag for Kamphaeng Phet (TH-62) 🏴󠁴󠁨󠀷󠀲󠁿 Flag for Suphanburi (TH-72) 🏴󠁴󠁨󠀷󠀴󠁿 Flag for Samut Sakhon (TH-74) 🏴󠁴󠁨󠀶󠀷󠁿 Flag for Phetchabun (TH-67) 🏴󠁴󠁨󠀷󠀱󠁿 Flag for Kanchanaburi (TH-71) 🏴󠁴󠁨󠀵󠀴󠁿 Flag for Phrae (TH-54) 🏴󠁴󠁨󠀶󠀳󠁿 Flag for Tak (TH-63) 🏴󠁴󠁨󠀴󠀸󠁿 Flag for Nakhon Phanom (TH-48) 🏴󠁴󠁨󠀵󠀲󠁿 Flag for Lampang (TH-52) 🏴󠁴󠁨󠀵󠀸󠁿 Flag for Mae Hong Son 
(TH-58) 🏴󠁴󠁨󠀴󠀷󠁿 Flag for Sakon Nakhon (TH-47) 🏴󠁴󠁨󠀵󠀶󠁿 Flag for Phayao (TH-56) 🏴󠁴󠁨󠀴󠀱󠁿 Flag for Udon Thani (TH-41) 🏴󠁴󠁨󠀴󠀹󠁿 Flag for Mukdahan (TH-49) 🏴󠁴󠁨󠀷󠀳󠁿 Flag for Nakhon Pathom (TH-73) 🏴󠁴󠁨󠀵󠀰󠁿 Flag for Chiang Mai (TH-50) 🏴󠁴󠁨󠀴󠀰󠁿 Flag for Khon Kaen (TH-40) 🏴󠁴󠁨󠀳󠀷󠁿 Flag for Amnat Charoen (TH-37) 🏴󠁴󠁨󠀷󠀰󠁿 Flag for Ratchaburi (TH-70) 🏴󠁴󠁨󠀳󠀵󠁿 Flag for Yasothon (TH-35) 🏴󠁴󠁨󠀵󠀱󠁿 Flag for Lamphun (TH-51) 🏴󠁴󠁨󠀴󠀲󠁿 Flag for Loei (TH-42) 🏴󠁴󠁨󠀶󠀰󠁿 Flag for Nakhon Sawan (TH-60) 🏴󠁴󠁨󠀳󠀴󠁿 Flag for Ubon Ratchathani (TH-34) 🏴󠁴󠁨󠀴󠀴󠁿 Flag for Maha Sarakham (TH-44) 🏴󠁴󠁨󠀴󠀵󠁿 Flag for Roi Et (TH-45) 🏴󠁴󠁨󠀴󠀶󠁿 Flag for Kalasin (TH-46) 🏴󠁴󠁨󠀶󠀶󠁿 Flag for Phichit (TH-66) 🏴󠁴󠁨󠀵󠀵󠁿 Flag for Nan (TH-55) 🏴󠁴󠁨󠀶󠀱󠁿 Flag for Uthai Thani (TH-61) 🏴󠁴󠁨󠀳󠀸󠁿 Flag for Bueng Kan (TH-38) 🏴󠁴󠁨󠀳󠀳󠁿 Flag for Si Sa Ket (TH-33) 🏴󠁴󠁨󠀳󠀹󠁿 Flag for Nong Bua Lam Phu (TH-39) 🏴󠁴󠁨󠀵󠀳󠁿 Flag for Uttaradit (TH-53) 🏴󠁴󠁨󠀵󠀷󠁿 Flag for Chiang Rai (TH-57) 🏴󠁴󠁨󠀶󠀴󠁿 Flag for Sukhothai (TH-64) 🏴󠁴󠁨󠀴󠀳󠁿 Flag for Nong Khai (TH-43) 🏴󠁴󠁨󠀶󠀵󠁿 Flag for Phitsanulok (TH-65) 🏴󠁴󠁬󠁥󠁲󠁿 Flag for Ermera (TL-ER) 🏴󠁴󠁬󠁯󠁥󠁿 Flag for Oecusse (TL-OE) 🏴󠁴󠁬󠁬󠁩󠁿 Flag for Liquiçá (TL-LI) 🏴󠁴󠁬󠁡󠁬󠁿 Flag for Aileu (TL-AL) 🏴󠁴󠁭󠁡󠁿 Flag for Ahal (TM-A) 🏴󠁴󠁨󠀸󠀴󠁿 Flag for Surat Thani (TH-84) 🏴󠁴󠁨󠀷󠀶󠁿 Flag for Phetchaburi (TH-76) 🏴󠁴󠁬󠁢󠁯󠁿 Flag for Bobonaro (TL-BO) 🏴󠁴󠁬󠁭󠁴󠁿 Flag for Manatuto (TL-MT) 🏴󠁴󠁪󠁫󠁴󠁿 Flag for Khatlon (TJ-KT) 🏴󠁴󠁬󠁡󠁮󠁿 Flag for Ainaro (TL-AN) 🏴󠁴󠁨󠀸󠀲󠁿 Flag for Phang Nga (TH-82) 🏴󠁴󠁬󠁣󠁯󠁿 Flag for Cova Lima (TL-CO) 🏴󠁴󠁮󠀱󠀱󠁿 Flag for Tunis (TN-11) 🏴󠁴󠁨󠀸󠀵󠁿 Flag for Ranong (TH-85) 🏴󠁴󠁨󠀸󠀰󠁿 Flag for Nakhon Si Thammarat (TH-80) 🏴󠁴󠁨󠀷󠀷󠁿 Flag for Prachuap Khiri Khan (TH-77) 🏴󠁴󠁪󠁤󠁵󠁿 Flag for Dushanbe (TJ-DU) 🏴󠁴󠁨󠀹󠀵󠁿 Flag for Yala (TH-95) 🏴󠁴󠁨󠀹󠀰󠁿 Flag for Songkhla (TH-90) 🏴󠁴󠁭󠁬󠁿 Flag for Lebap (TM-L) 🏴󠁴󠁨󠀹󠀶󠁿 Flag for Narathiwat (TH-96) 🏴󠁴󠁭󠁭󠁿 Flag for Mary (TM-M) 🏴󠁴󠁬󠁭󠁦󠁿 Flag for Manufahi (TL-MF) 👨🏼‍👨🏼‍👦🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁴󠁭󠁢󠁿 Flag for Balkan (TM-B) 🏴󠁴󠁬󠁢󠁡󠁿 Flag for Baucau (TL-BA) 🏴󠁴󠁪󠁲󠁡󠁿 Flag for Nohiyahoi Tobei Jumhurí 
(TJ-RA) 🏴󠁴󠁨󠀹󠀲󠁿 Flag for Trang (TH-92) 🏴󠁴󠁪󠁳󠁵󠁿 Flag for Sughd (TJ-SU) 🏴󠁴󠁬󠁶󠁩󠁿 Flag for Viqueque (TL-VI) 🏴󠁴󠁨󠀹󠀴󠁿 Flag for Pattani (TH-94) 🏴󠁴󠁨󠀸󠀱󠁿 Flag for Krabi (TH-81) 🏴󠁴󠁬󠁤󠁩󠁿 Flag for Dili (TL-DI) 🏴󠁴󠁨󠀸󠀳󠁿 Flag for Phuket (TH-83) 🏴󠁴󠁨󠀹󠀱󠁿 Flag for Satun (TH-91) 🏴󠁴󠁨󠁳󠁿 Flag for Pattaya (TH-S) 🏴󠁴󠁭󠁤󠁿 Flag for Daşoguz (TM-D) 🏴󠁴󠁮󠀴󠀱󠁿 Flag for Kairouan (TN-41) 🏴󠁴󠁮󠀵󠀲󠁿 Flag for Monastir (TN-52) 🏴󠁴󠁲󠀰󠀹󠁿 Flag for Aydın (TR-09) 🏴󠁴󠁮󠀳󠀱󠁿 Flag for Béja (TN-31) 🏴󠁴󠁲󠀰󠀷󠁿 Flag for Antalya (TR-07) 🏴󠁴󠁮󠀲󠀱󠁿 Flag for Nabeul (TN-21) 🏴󠁴󠁮󠀵󠀳󠁿 Flag for Mahdia (TN-53) 🏴󠁴󠁯󠀰󠀲󠁿 Flag for Haʻapai (TO-02) 🏴󠁴󠁲󠀰󠀵󠁿 Flag for Amasya (TR-05) 🏴󠁴󠁲󠀱󠀳󠁿 Flag for Bitlis (TR-13) 🏴󠁴󠁮󠀱󠀲󠁿 Flag for Ariana (TN-12) 🏴󠁴󠁮󠀷󠀳󠁿 Flag for Kebili (TN-73) 🏴󠁴󠁲󠀰󠀱󠁿 Flag for Adana (TR-01) 🏴󠁴󠁯󠀰󠀱󠁿 Flag for ʻEua (TO-01) 🏴󠁴󠁲󠀱󠀲󠁿 Flag for Bingöl (TR-12) 🏴󠁴󠁮󠀸󠀳󠁿 Flag for Tataouine (TN-83) 🏴󠁴󠁲󠀰󠀸󠁿 Flag for Artvin (TR-08) 🏴󠁴󠁮󠀵󠀱󠁿 Flag for Sousse (TN-51) 🏴󠁴󠁮󠀸󠀱󠁿 Flag for Gabès (TN-81) 🏴󠁴󠁲󠀰󠀴󠁿 Flag for Ağrı (TR-04) 🏴󠁴󠁲󠀱󠀱󠁿 Flag for Bilecik (TR-11) 🏴󠁴󠁮󠀳󠀲󠁿 Flag for Jendouba (TN-32) 🏴󠁴󠁯󠀰󠀴󠁿 Flag for Tongatapu (TO-04) 🏴󠁴󠁲󠀰󠀲󠁿 Flag for Adıyaman (TR-02) 🏴󠁴󠁮󠀳󠀳󠁿 Flag for Kef (TN-33) 🏴󠁴󠁮󠀲󠀲󠁿 Flag for Zaghouan (TN-22) 🏴󠁴󠁲󠀱󠀰󠁿 Flag for Balıkesir (TR-10) 🏴󠁴󠁮󠀱󠀳󠁿 Flag for Ben Arous (TN-13) 🏴󠁴󠁯󠀰󠀳󠁿 Flag for Niuas (TO-03) 🏴󠁴󠁮󠀷󠀲󠁿 Flag for Tozeur (TN-72) 🏴󠁴󠁮󠀱󠀴󠁿 Flag for Manouba (TN-14) 🏴󠁴󠁮󠀴󠀲󠁿 Flag for Kasserine (TN-42) 🏴󠁴󠁲󠀱󠀴󠁿 Flag for Bolu (TR-14) 🏴󠁴󠁮󠀳󠀴󠁿 Flag for Siliana (TN-34) 🏴󠁴󠁯󠀰󠀵󠁿 Flag for Vavaʻu (TO-05) 🏴󠁴󠁲󠀰󠀶󠁿 Flag for Ankara (TR-06) 🏴󠁴󠁮󠀶󠀱󠁿 Flag for Sfax (TN-61) 🏴󠁴󠁮󠀴󠀳󠁿 Flag for Sidi Bouzid (TN-43) 🏴󠁴󠁮󠀸󠀲󠁿 Flag for Medenine (TN-82) 🏴󠁴󠁮󠀲󠀳󠁿 Flag for Bizerte (TN-23) 🏴󠁴󠁲󠀲󠀴󠁿 Flag for Erzincan (TR-24) 🏴󠁴󠁲󠀴󠀶󠁿 Flag for Kahramanmaraş (TR-46) 🏴󠁴󠁲󠀳󠀶󠁿 Flag for Kars (TR-36) 🏴󠁴󠁲󠀵󠀱󠁿 Flag for Niğde (TR-51) 🏴󠁴󠁲󠀳󠀸󠁿 Flag for Kayseri (TR-38) 🏴󠁴󠁲󠀴󠀱󠁿 Flag for Kocaeli (TR-41) 🏴󠁴󠁲󠀱󠀸󠁿 Flag for Çankırı (TR-18) 🏴󠁴󠁲󠀴󠀸󠁿 Flag for Muğla (TR-48) 🏴󠁴󠁲󠀴󠀲󠁿 Flag for Konya (TR-42) 🏴󠁴󠁲󠀴󠀴󠁿 Flag for Malatya (TR-44) 🏴󠁴󠁲󠀲󠀹󠁿 Flag for Gümüşhane (TR-29) 🏴󠁴󠁲󠀲󠀲󠁿 Flag for Edirne (TR-22) 🏴󠁴󠁲󠀳󠀹󠁿 Flag for Kırklareli (TR-39) 🏴󠁴󠁲󠀲󠀷󠁿 
Flag for Gaziantep (TR-27) 🏴󠁴󠁲󠀵󠀵󠁿 Flag for Samsun (TR-55) 🏴󠁴󠁲󠀲󠀱󠁿 Flag for Diyarbakır (TR-21) 🏴󠁴󠁲󠀱󠀶󠁿 Flag for Bursa (TR-16) 🏴󠁴󠁲󠀱󠀹󠁿 Flag for Çorum (TR-19) 🏴󠁴󠁲󠀵󠀲󠁿 Flag for Ordu (TR-52) 🏴󠁴󠁲󠀴󠀵󠁿 Flag for Manisa (TR-45) 🏴󠁴󠁲󠀲󠀵󠁿 Flag for Erzurum (TR-25) 🏴󠁴󠁲󠀱󠀵󠁿 Flag for Burdur (TR-15) 🏴󠁴󠁲󠀳󠀲󠁿 Flag for Isparta (TR-32) 🏴󠁴󠁲󠀳󠀴󠁿 Flag for Istanbul (TR-34) 🏴󠁴󠁲󠀳󠀰󠁿 Flag for Hakkâri (TR-30) 🏴󠁴󠁲󠀳󠀱󠁿 Flag for Hatay (TR-31) 🏴󠁴󠁲󠀴󠀹󠁿 Flag for Muş (TR-49) 🏴󠁴󠁲󠀳󠀳󠁿 Flag for Mersin (TR-33) 🏴󠁴󠁲󠀵󠀶󠁿 Flag for Siirt (TR-56) 🏴󠁴󠁲󠀵󠀰󠁿 Flag for Nevşehir (TR-50) 🏴󠁴󠁲󠀲󠀳󠁿 Flag for Elazığ (TR-23) 🏴󠁴󠁲󠀲󠀸󠁿 Flag for Giresun (TR-28) 🏴󠁴󠁲󠀲󠀰󠁿 Flag for Denizli (TR-20) 🏴󠁴󠁲󠀴󠀷󠁿 Flag for Mardin (TR-47) 🏴󠁴󠁲󠀳󠀷󠁿 Flag for Kastamonu (TR-37) 🏴󠁴󠁲󠀵󠀴󠁿 Flag for Sakarya (TR-54) 🏴󠁴󠁲󠀴󠀰󠁿 Flag for Kırşehir (TR-40) 🏴󠁴󠁲󠀱󠀷󠁿 Flag for Çanakkale (TR-17) 🏴󠁴󠁲󠀵󠀳󠁿 Flag for Rize (TR-53) 🏴󠁴󠁲󠀲󠀶󠁿 Flag for Eskişehir (TR-26) 🏴󠁴󠁲󠀶󠀵󠁿 Flag for Van (TR-65) 🏴󠁴󠁴󠁰󠁲󠁴󠁿 Flag for Princes Town (TT-PRT) 🏴󠁴󠁴󠁣󠁴󠁴󠁿 Flag for Couva-Tabaquite-Talparo (TT-CTT) 🏴󠁴󠁴󠁴󠁯󠁢󠁿 Flag for Tobago (TT-TOB) 🏴󠁴󠁲󠀶󠀳󠁿 Flag for Şanlıurfa (TR-63) 🏴󠁴󠁴󠁡󠁲󠁩󠁿 Flag for Arima (TT-ARI) 🏴󠁴󠁲󠀶󠀷󠁿 Flag for Zonguldak (TR-67) 🏴󠁴󠁴󠁳󠁩󠁰󠁿 Flag for Siparia (TT-SIP) 🏴󠁴󠁲󠀷󠀵󠁿 Flag for Ardahan (TR-75) 🏴󠁴󠁲󠀷󠀹󠁿 Flag for Kilis (TR-79) 🏴󠁴󠁴󠁰󠁯󠁳󠁿 Flag for Port of Spain (TT-POS) 🏴󠁴󠁲󠀶󠀸󠁿 Flag for Aksaray (TR-68) 🏴󠁴󠁴󠁤󠁭󠁮󠁿 Flag for Diego Martin (TT-DMN) 🏴󠁴󠁲󠀶󠀹󠁿 Flag for Bayburt (TR-69) 🏴󠁴󠁲󠀵󠀹󠁿 Flag for Tekirdağ (TR-59) 🏴󠁴󠁲󠀷󠀲󠁿 Flag for Batman (TR-72) 🏴󠁴󠁴󠁣󠁨󠁡󠁿 Flag for Chaguanas (TT-CHA) 🏴󠁴󠁲󠀸󠀰󠁿 Flag for Osmaniye (TR-80) 🏴󠁴󠁲󠀷󠀷󠁿 Flag for Yalova (TR-77) 🏴󠁴󠁴󠁳󠁪󠁬󠁿 Flag for San Juan-Laventille (TT-SJL) 🏴󠁴󠁲󠀷󠀸󠁿 Flag for Karabük (TR-78) 🏴󠁴󠁲󠀶󠀶󠁿 Flag for Yozgat (TR-66) 🏴󠁴󠁴󠁭󠁲󠁣󠁿 Flag for Mayaro-Rio Claro (TT-MRC) 🏴󠁴󠁲󠀶󠀴󠁿 Flag for Uşak (TR-64) 🏴󠁴󠁲󠀵󠀷󠁿 Flag for Sinop (TR-57) 🏴󠁴󠁴󠁴󠁵󠁰󠁿 Flag for Tunapuna-Piarco (TT-TUP) 🏴󠁴󠁲󠀷󠀴󠁿 Flag for Bartın (TR-74) 🏴󠁴󠁲󠀷󠀱󠁿 Flag for Kırıkkale (TR-71) 🏴󠁴󠁴󠁰󠁥󠁤󠁿 Flag for Penal-Debe (TT-PED) 🏴󠁴󠁲󠀷󠀶󠁿 Flag for Iğdır (TR-76) 🏴󠁴󠁲󠀷󠀳󠁿 Flag for Şırnak (TR-73) 🏴󠁴󠁲󠀶󠀱󠁿 Flag for Trabzon (TR-61) 🏴󠁴󠁴󠁰󠁴󠁦󠁿 Flag for Point Fortin (TT-PTF) 🏴󠁴󠁲󠀶󠀲󠁿 Flag 
for Tunceli (TR-62) 🏴󠁴󠁲󠀶󠀰󠁿 Flag for Tokat (TR-60) 🏴󠁴󠁲󠀷󠀰󠁿 Flag for Karaman (TR-70) 🏴󠁴󠁴󠁳󠁦󠁯󠁿 Flag for San Fernando (TT-SFO) 🏴󠁴󠁲󠀵󠀸󠁿 Flag for Sivas (TR-58) 🏴󠁴󠁺󠀰󠀷󠁿 Flag for Zanzibar North (TZ-07) 🏴󠁴󠁷󠁣󠁨󠁡󠁿 Flag for Changhua (TW-CHA) 🏴󠁴󠁶󠁶󠁡󠁩󠁿 Flag for Vaitupu (TV-VAI) 🏴󠁴󠁷󠁫󠁨󠁨󠁿 Flag for Kaohsiung (TW-KHH) 🏴󠁴󠁺󠀰󠀹󠁿 Flag for Kilimanjaro (TZ-09) 🏴󠁴󠁷󠁫󠁩󠁮󠁿 Flag for Kinmen (TW-KIN) 🏴󠁴󠁷󠁰󠁥󠁮󠁿 Flag for Penghu (TW-PEN) 🏴󠁴󠁷󠁴󠁮󠁮󠁿 Flag for Tainan (TW-TNN) 🏴󠁴󠁶󠁮󠁫󠁦󠁿 Flag for Nukufetau (TV-NKF) 🏴󠁴󠁺󠀰󠀸󠁿 Flag for Kigoma (TZ-08) 🏴󠁴󠁷󠁴󠁰󠁥󠁿 Flag for Taipei (TW-TPE) 🏴󠁴󠁷󠁰󠁩󠁦󠁿 Flag for Pingtung (TW-PIF) 🏴󠁴󠁷󠁩󠁬󠁡󠁿 Flag for Yilan (TW-ILA) 🏴󠁴󠁷󠁴󠁡󠁯󠁿 Flag for Taoyuan (TW-TAO) 🏴󠁴󠁺󠀰󠀳󠁿 Flag for Dodoma (TZ-03) 🏴󠁴󠁶󠁮󠁵󠁩󠁿 Flag for Nui (TV-NUI) 🏴󠁴󠁶󠁮󠁩󠁴󠁿 Flag for Niutao (TV-NIT) 🏴󠁴󠁺󠀰󠀶󠁿 Flag for North Pemba (TZ-06) 🏴󠁴󠁷󠁮󠁷󠁴󠁿 Flag for New Taipei (TW-NWT) 🏴󠁴󠁺󠀰󠀴󠁿 Flag for Iringa (TZ-04) 🏴󠁴󠁺󠀰󠀵󠁿 Flag for Kagera (TZ-05) 🏴󠁴󠁷󠁹󠁵󠁮󠁿 Flag for Yunlin (TW-YUN) 🏴󠁴󠁷󠁬󠁩󠁥󠁿 Flag for Lienchiang (TW-LIE) 🏴󠁴󠁶󠁮󠁭󠁧󠁿 Flag for Nanumanga (TV-NMG) 🏴󠁴󠁺󠀰󠀲󠁿 Flag for Dar es Salaam (TZ-02) 🏴󠁴󠁶󠁮󠁭󠁡󠁿 Flag for Nanumea (TV-NMA) 🏴󠁴󠁷󠁴󠁴󠁴󠁿 Flag for Taitung (TW-TTT) 🏴󠁴󠁷󠁮󠁡󠁮󠁿 Flag for Nantou (TW-NAN) 🏴󠁴󠁷󠁣󠁹󠁱󠁿 Flag for Chiayi (TW-CYQ) 🏴󠁴󠁺󠀰󠀱󠁿 Flag for Arusha (TZ-01) 🏴󠁴󠁷󠁨󠁵󠁡󠁿 Flag for Hualien (TW-HUA) 🏴󠁴󠁷󠁣󠁹󠁩󠁿 Flag for Chiayi County (TW-CYI) 🏴󠁴󠁷󠁴󠁸󠁧󠁿 Flag for Taichung (TW-TXG) 🏴󠁴󠁷󠁫󠁥󠁥󠁿 Flag for Keelung (TW-KEE) 🏴󠁴󠁷󠁭󠁩󠁡󠁿 Flag for Miaoli (TW-MIA) 🏴󠁵󠁡󠀴󠀳󠁿 Flag for Crimea (UA-43) 🏴󠁴󠁺󠀱󠀲󠁿 Flag for Lindi (TZ-12) 🏴󠁴󠁺󠀲󠀶󠁿 Flag for Manyara (TZ-26) 🏴󠁵󠁡󠀰󠀹󠁿 Flag for Luhanshchyna (UA-09) 🏴󠁴󠁺󠀲󠀰󠁿 Flag for Rukwa (TZ-20) 🏴󠁵󠁡󠀱󠀲󠁿 Flag for Dnipropetrovshchyna (UA-12) 🏴󠁵󠁡󠀰󠀷󠁿 Flag for Volyn (UA-07) 🏴󠁴󠁺󠀲󠀲󠁿 Flag for Shinyanga (TZ-22) 🏴󠁵󠁡󠀰󠀵󠁿 Flag for Vinnychchyna (UA-05) 🏴󠁴󠁺󠀲󠀱󠁿 Flag for Ruvuma (TZ-21) 🏴󠁴󠁺󠀲󠀸󠁿 Flag for Katavi (TZ-28) 🏴󠁵󠁡󠀲󠀳󠁿 Flag for Zaporizhzhya (UA-23) 🏴󠁵󠁡󠀳󠀲󠁿 Flag for Kyivshchyna (UA-32) 🏴󠁴󠁺󠀲󠀳󠁿 Flag for Singida (TZ-23) 🏴󠁴󠁺󠀲󠀴󠁿 Flag for Tabora (TZ-24) 🏴󠁴󠁺󠀱󠀳󠁿 Flag for Mara (TZ-13) 🏴󠁴󠁺󠀲󠀷󠁿 Flag for Geita (TZ-27) 🏴󠁴󠁺󠀳󠀰󠁿 Flag for Simiyu (TZ-30) 🏴󠁵󠁡󠀴󠀸󠁿 Flag for Mykolayivschyna (UA-48) 🏴󠁵󠁡󠀳󠀵󠁿 Flag for 
Kirovohradschyna (UA-35) 🏴󠁵󠁡󠀵󠀶󠁿 Flag for Rivnenshchyna (UA-56) 🏴󠁵󠁡󠀵󠀳󠁿 Flag for Poltavshchyna (UA-53) 🏴󠁴󠁺󠀱󠀴󠁿 Flag for Mbeya (TZ-14) 🏴󠁴󠁺󠀱󠀸󠁿 Flag for Mwanza (TZ-18) 🏴󠁵󠁡󠀲󠀱󠁿 Flag for Zakarpattia (UA-21) 🏴󠁴󠁺󠀱󠀰󠁿 Flag for South Pemba (TZ-10) 🏴󠁴󠁺󠀱󠀹󠁿 Flag for Pwani (TZ-19) 🏴󠁴󠁺󠀱󠀷󠁿 Flag for Mtwara (TZ-17) 🏴󠁵󠁡󠀴󠀰󠁿 Flag for Sevastopol (UA-40) 🏴󠁵󠁡󠀵󠀱󠁿 Flag for Odeshchyna (UA-51) 🏴󠁵󠁡󠀴󠀶󠁿 Flag for Lvivshchyna (UA-46) 🏴󠁵󠁡󠀱󠀴󠁿 Flag for Donechchyna (UA-14) 🏴󠁵󠁡󠀲󠀶󠁿 Flag for Prykarpattia (UA-26) 🏴󠁴󠁺󠀱󠀵󠁿 Flag for Zanzibar Urban/West (TZ-15) 🏴󠁴󠁺󠀱󠀶󠁿 Flag for Morogoro (TZ-16) 🏴󠁴󠁺󠀲󠀹󠁿 Flag for Njombe (TZ-29) 🏴󠁵󠁡󠀷󠀷󠁿 Flag for Chernivtsi Oblast (UA-77) 🏴󠁵󠁭󠀹󠀵󠁿 Flag for Palmyra Atoll (UM-95) 🏴󠁵󠁳󠁫󠁳󠁿 Flag for Kansas (US-KS) 👨🏽‍👨🏽‍👦🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁵󠁳󠁡󠁺󠁿 Flag for Arizona (US-AZ) 🏴󠁵󠁭󠀶󠀷󠁿 Flag for Johnston Atoll (UM-67) 🏴󠁵󠁡󠀷󠀴󠁿 Flag for Chernihivshchyna (UA-74) 🏴󠁵󠁭󠀸󠀴󠁿 Flag for Howland Island (UM-84) 🏴󠁵󠁳󠁧󠁡󠁿 Flag for Georgia (US-GA) 🏴󠁵󠁳󠁨󠁩󠁿 Flag for Hawaii (US-HI) 🏴󠁵󠁭󠀷󠀱󠁿 Flag for Midway Atoll (UM-71) 🏴󠁵󠁳󠁡󠁳󠁿 Flag for American Samoa (US-AS) 🏴󠁵󠁳󠁣󠁴󠁿 Flag for Connecticut (US-CT) 🏴󠁵󠁳󠁩󠁡󠁿 Flag for Iowa (US-IA) 🏴󠁵󠁡󠀶󠀱󠁿 Flag for Ternopilshchyna (UA-61) 🏴󠁵󠁧󠁮󠁿 Flag for Northern (UG-N) 🏴󠁵󠁳󠁧󠁵󠁿 Flag for Guam (US-GU) 🏴󠁵󠁭󠀸󠀱󠁿 Flag for Baker Island (UM-81) 🏴󠁵󠁧󠁥󠁿 Flag for Eastern (UG-E) 🏴󠁵󠁡󠀶󠀵󠁿 Flag for Khersonshchyna (UA-65) 🏴󠁵󠁡󠀵󠀹󠁿 Flag for Sumshchyna (UA-59) 🏴󠁵󠁳󠁩󠁮󠁿 Flag for Indiana (US-IN) 🏴󠁵󠁳󠁡󠁲󠁿 Flag for Arkansas (US-AR) 🏴󠁵󠁳󠁤󠁥󠁿 Flag for Delaware (US-DE) 🏴󠁵󠁡󠀶󠀳󠁿 Flag for Kharkivshchyna (UA-63) 🏴󠁵󠁳󠁡󠁬󠁿 Flag for Alabama (US-AL) 🏴󠁵󠁧󠁷󠁿 Flag for Western (UG-W) 🏴󠁵󠁡󠀶󠀸󠁿 Flag for Khmelnychchyna (UA-68) 🏴󠁵󠁭󠀷󠀶󠁿 Flag for Navassa Island (UM-76) 🏴󠁵󠁭󠀸󠀶󠁿 Flag for Jarvis Island (UM-86) 🏴󠁵󠁳󠁩󠁤󠁿 Flag for Idaho (US-ID) 🏴󠁵󠁭󠀸󠀹󠁿 Flag for Kingman Reef (UM-89) 🏴󠁵󠁳󠁦󠁬󠁿 Flag for Florida (US-FL) 🏴󠁵󠁭󠀷󠀹󠁿 Flag for Wake Island (UM-79) 🏴󠁵󠁳󠁩󠁬󠁿 Flag for Illinois (US-IL) 🏴󠁵󠁳󠁤󠁣󠁿 Flag for Washington DC (US-DC) 🏴󠁵󠁡󠀷󠀱󠁿 Flag for Cherkashchyna (UA-71) 🏴󠁵󠁳󠁮󠁹󠁿 Flag for New York (US-NY) 
👨🏾‍👨🏾‍👦🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁵󠁳󠁮󠁣󠁿 Flag for North Carolina (US-NC) 🏴󠁵󠁳󠁭󠁳󠁿 Flag for Mississippi (US-MS) 🏴󠁵󠁳󠁭󠁡󠁿 Flag for Massachusetts (US-MA) 🏴󠁵󠁳󠁮󠁶󠁿 Flag for Nevada (US-NV) 🏴󠁵󠁳󠁷󠁩󠁿 Flag for Wisconsin (US-WI) 🏴󠁵󠁳󠁭󠁤󠁿 Flag for Maryland (US-MD) 🏴󠁵󠁳󠁮󠁭󠁿 Flag for New Mexico (US-NM) 🏴󠁵󠁳󠁰󠁲󠁿 Flag for Puerto Rico (US-PR) 🏴󠁵󠁳󠁵󠁭󠁿 Flag for U.S. Outlying Islands (US-UM) 🏴󠁵󠁳󠁷󠁹󠁿 Flag for Wyoming (US-WY) 🏴󠁵󠁳󠁯󠁨󠁿 Flag for Ohio (US-OH) 🏴󠁵󠁳󠁫󠁹󠁿 Flag for Kentucky (US-KY) 🏴󠁵󠁳󠁮󠁪󠁿 Flag for New Jersey (US-NJ) 🏴󠁵󠁳󠁯󠁲󠁿 Flag for Oregon (US-OR) 🏴󠁵󠁳󠁭󠁩󠁿 Flag for Michigan (US-MI) 🏴󠁵󠁳󠁶󠁩󠁿 Flag for U.S. Virgin Islands (US-VI) 🏴󠁵󠁳󠁭󠁯󠁿 Flag for Missouri (US-MO) 🏴󠁵󠁳󠁰󠁡󠁿 Flag for Pennsylvania (US-PA) 🏴󠁵󠁳󠁶󠁡󠁿 Flag for Virginia (US-VA) 🏴󠁵󠁹󠁡󠁲󠁿 Flag for Artigas (UY-AR) 🏴󠁵󠁹󠁣󠁡󠁿 Flag for Canelones (UY-CA) 🏴󠁵󠁳󠁷󠁡󠁿 Flag for Washington (US-WA) 🏴󠁵󠁳󠁳󠁣󠁿 Flag for South Carolina (US-SC) 🏴󠁵󠁳󠁭󠁥󠁿 Flag for Maine (US-ME) 🏴󠁵󠁳󠁬󠁡󠁿 Flag for Louisiana (US-LA) 🏴󠁵󠁳󠁭󠁮󠁿 Flag for Minnesota (US-MN) 🏴󠁵󠁳󠁲󠁩󠁿 Flag for Rhode Island (US-RI) 🏴󠁵󠁳󠁷󠁶󠁿 Flag for West Virginia (US-WV) 🏴󠁵󠁳󠁴󠁸󠁿 Flag for Texas (US-TX) 🏴󠁵󠁳󠁵󠁴󠁿 Flag for Utah (US-UT) 🏴󠁵󠁳󠁯󠁫󠁿 Flag for Oklahoma (US-OK) 🏴󠁵󠁳󠁮󠁨󠁿 Flag for New Hampshire (US-NH) 🏴󠁵󠁺󠁳󠁡󠁿 Flag for Samarqand (UZ-SA) 🏴󠁵󠁹󠁭󠁡󠁿 Flag for Maldonado (UY-MA) 🏴󠁵󠁺󠁮󠁧󠁿 Flag for Namangan (UZ-NG) 🏴󠁶󠁣󠀰󠀱󠁿 Flag for Charlotte (VC-01) 🏴󠁵󠁹󠁳󠁡󠁿 Flag for Salto (UY-SA) 🏴󠁵󠁹󠁣󠁬󠁿 Flag for Cerro Largo (UY-CL) 🏴󠁵󠁹󠁴󠁡󠁿 Flag for Tacuarembó (UY-TA) 🏴󠁶󠁥󠁡󠁿 Flag for Capital (VE-A) 🏴󠁶󠁥󠁢󠁿 Flag for Anzoátegui (VE-B) 🏴󠁶󠁣󠀰󠀲󠁿 Flag for Saint Andrew (VC-02) 🏴󠁵󠁹󠁳󠁯󠁿 Flag for Soriano (UY-SO) 🏴󠁵󠁹󠁲󠁯󠁿 Flag for Rocha (UY-RO) 🏴󠁶󠁣󠀰󠀳󠁿 Flag for Saint David (VC-03) 🏴󠁵󠁹󠁳󠁪󠁿 Flag for San José (UY-SJ) 🏴󠁵󠁹󠁦󠁤󠁿 Flag for Florida (UY-FD) 🏴󠁵󠁹󠁣󠁯󠁿 Flag for Colonia (UY-CO) 🏴󠁵󠁹󠁦󠁳󠁿 Flag for Flores (UY-FS) 🏴󠁵󠁺󠁸󠁯󠁿 Flag for Xorazm (UZ-XO) 🏴󠁵󠁹󠁤󠁵󠁿 Flag for Durazno (UY-DU) 🏴󠁵󠁺󠁡󠁮󠁿 Flag for Andijan (UZ-AN) 🏴󠁶󠁥󠁤󠁿 Flag for Aragua (VE-D) 🏴󠁵󠁺󠁳󠁩󠁿 Flag for Sirdaryo (UZ-SI) 🏴󠁵󠁹󠁰󠁡󠁿 Flag for Paysandú (UY-PA) 
🏴󠁶󠁣󠀰󠀶󠁿 Flag for Grenadines (VC-06) 🏴󠁵󠁹󠁲󠁶󠁿 Flag for Rivera (UY-RV) 🏴󠁵󠁹󠁬󠁡󠁿 Flag for Lavalleja (UY-LA) 🏴󠁵󠁺󠁳󠁵󠁿 Flag for Surxondaryo (UZ-SU) 🏴󠁵󠁺󠁴󠁯󠁿 Flag for Tashkent Province (UZ-TO) 🏴󠁵󠁺󠁱󠁡󠁿 Flag for Qashqadaryo (UZ-QA) 🏴󠁵󠁹󠁴󠁴󠁿 Flag for Treinta y Tres (UY-TT) 🏴󠁵󠁹󠁭󠁯󠁿 Flag for Montevideo (UY-MO) 🏴󠁵󠁺󠁢󠁵󠁿 Flag for Bukhara (UZ-BU) 🏴󠁵󠁺󠁦󠁡󠁿 Flag for Fergana (UZ-FA) 🏴󠁵󠁺󠁱󠁲󠁿 Flag for Karakalpakstan (UZ-QR) 🏴󠁵󠁺󠁪󠁩󠁿 Flag for Jizzakh (UZ-JI) 🏴󠁵󠁹󠁲󠁮󠁿 Flag for Río Negro (UY-RN) 🏴󠁵󠁺󠁴󠁫󠁿 Flag for Tashkent (UZ-TK) 🏴󠁶󠁣󠀰󠀵󠁿 Flag for Saint Patrick (VC-05) 🏴󠁵󠁺󠁮󠁷󠁿 Flag for Navoiy (UZ-NW) 🏴󠁶󠁥󠁫󠁿 Flag for Lara (VE-K) 🏴󠁶󠁥󠁯󠁿 Flag for Nueva Esparta (VE-O) 🏴󠁶󠁥󠁳󠁿 Flag for Táchira (VE-S) 🏴󠁶󠁥󠁦󠁿 Flag for Bolívar (VE-F) 🏴󠁶󠁮󠀲󠀱󠁿 Flag for Thanh Hóa (VN-21) 🏴󠁶󠁮󠀱󠀴󠁿 Flag for Hòa Bình (VN-14) 🏴󠁶󠁥󠁪󠁿 Flag for Guárico (VE-J) 🏴󠁶󠁥󠁨󠁿 Flag for Cojedes (VE-H) 🏴󠁶󠁮󠀲󠀶󠁿 Flag for Thừa Thiên–Huế (VN-26) 🏴󠁶󠁥󠁰󠁿 Flag for Portuguesa (VE-P) 🏴󠁶󠁮󠀱󠀸󠁿 Flag for Ninh Bình (VN-18) 🏴󠁶󠁥󠁲󠁿 Flag for Sucre (VE-R) 🏴󠁶󠁮󠀰󠀱󠁿 Flag for Lai Châu (VN-01) 🏴󠁶󠁮󠀰󠀹󠁿 Flag for Lạng Sơn (VN-09) 🏴󠁶󠁥󠁭󠁿 Flag for Miranda (VE-M) 🏴󠁶󠁮󠀲󠀴󠁿 Flag for Quảng Bình (VN-24) 🏴󠁶󠁥󠁥󠁿 Flag for Barinas (VE-E) 🏴󠁶󠁥󠁮󠁿 Flag for Monagas (VE-N) 🏴󠁶󠁮󠀲󠀲󠁿 Flag for Nghệ An (VN-22) 🏴󠁶󠁮󠀰󠀲󠁿 Flag for Lào Cai (VN-02) 🏴󠁶󠁮󠀰󠀷󠁿 Flag for Tuyên Quang (VN-07) 🏴󠁶󠁮󠀰󠀵󠁿 Flag for Sơn La (VN-05) 🏴󠁶󠁮󠀲󠀰󠁿 Flag for Thái Bình (VN-20) 🏴󠁶󠁥󠁷󠁿 Flag for Federal Dependencies (VE-W) 🏴󠁶󠁮󠀲󠀹󠁿 Flag for Quảng Ngãi (VN-29) 🏴󠁶󠁥󠁬󠁿 Flag for Mérida (VE-L) 🏴󠁶󠁥󠁩󠁿 Flag for Falcón (VE-I) 🏴󠁶󠁮󠀰󠀴󠁿 Flag for Cao Bằng (VN-04) 🏴󠁶󠁥󠁺󠁿 Flag for Amazonas (VE-Z) 🏴󠁶󠁮󠀰󠀶󠁿 Flag for Yên Bái (VN-06) 🏴󠁶󠁮󠀲󠀳󠁿 Flag for Hà Tĩnh (VN-23) 🏴󠁶󠁮󠀲󠀸󠁿 Flag for Kon Tum (VN-28) 🏴󠁶󠁥󠁸󠁿 Flag for Vargas (VE-X) 🏴󠁶󠁥󠁵󠁿 Flag for Yaracuy (VE-U) 🏴󠁶󠁥󠁴󠁿 Flag for Trujillo (VE-T) 🏴󠁶󠁮󠀱󠀳󠁿 Flag for Quảng Ninh (VN-13) 🏴󠁶󠁮󠀰󠀳󠁿 Flag for Hà Giang (VN-03) 🏴󠁶󠁮󠀲󠀷󠁿 Flag for Quảng Nam (VN-27) 🏴󠁶󠁮󠀵󠀶󠁿 Flag for Bắc Ninh (VN-56) 🏴󠁶󠁮󠀳󠀶󠁿 Flag for Ninh Thuận (VN-36) 🏴󠁶󠁮󠀶󠀹󠁿 Flag for Thái Nguyên (VN-69) 🏴󠁶󠁮󠀶󠀷󠁿 Flag for Nam Định (VN-67) 🏴󠁶󠁮󠀳󠀵󠁿 Flag for Lâm Đồng (VN-35) 🏴󠁶󠁮󠀶󠀱󠁿 Flag for Hải Dương (VN-61) 🏴󠁶󠁮󠀵󠀲󠁿 
Flag for Sóc Trăng (VN-52) 🏴󠁶󠁮󠀷󠀳󠁿 Flag for Hậu Giang (VN-73) 🏴󠁶󠁮󠀷󠀰󠁿 Flag for Vĩnh Phúc (VN-70) 🏴󠁶󠁮󠀵󠀰󠁿 Flag for Bến Tre (VN-50) 🏴󠁶󠁮󠀵󠀳󠁿 Flag for Bắc Kạn (VN-53) 🏴󠁶󠁮󠀵󠀴󠁿 Flag for Bắc Giang (VN-54) 🏴󠁶󠁮󠀳󠀳󠁿 Flag for Đắk Lắk (VN-33) 🏴󠁶󠁮󠀵󠀷󠁿 Flag for Bình Dương (VN-57) 🏴󠁶󠁮󠁤󠁮󠁿 Flag for Da Nang (VN-DN) 🏴󠁶󠁮󠀴󠀶󠁿 Flag for Tiền Giang (VN-46) 🏴󠁶󠁮󠀴󠀳󠁿 Flag for Bà Rịa–Vũng Tàu (VN-43) 🏴󠁶󠁮󠀷󠀱󠁿 Flag for Điện Biên (VN-71) 🏴󠁶󠁮󠀵󠀸󠁿 Flag for Bình Phước (VN-58) 🏴󠁶󠁮󠁣󠁴󠁿 Flag for Can Tho (VN-CT) 🏴󠁶󠁮󠀵󠀵󠁿 Flag for Bạc Liêu (VN-55) 🏴󠁶󠁮󠀳󠀲󠁿 Flag for Phú Yên (VN-32) 🏴󠁶󠁮󠀴󠀴󠁿 Flag for An Giang (VN-44) 🏴󠁶󠁮󠀶󠀳󠁿 Flag for Hà Nam (VN-63) 🏴󠁶󠁮󠀵󠀹󠁿 Flag for Cà Mau (VN-59) 🏴󠁶󠁮󠀴󠀷󠁿 Flag for Kiên Giang (VN-47) 🏴󠁶󠁮󠀳󠀴󠁿 Flag for Khánh Hòa (VN-34) 🏴󠁶󠁮󠀴󠀵󠁿 Flag for Đồng Tháp (VN-45) 🏴󠁶󠁮󠀳󠀹󠁿 Flag for Đồng Nai (VN-39) 🏴󠁶󠁮󠁨󠁮󠁿 Flag for Hanoi (VN-HN) 🏴󠁶󠁮󠀴󠀹󠁿 Flag for Vĩnh Long (VN-49) 🏴󠁶󠁮󠀶󠀸󠁿 Flag for Phú Thọ (VN-68) 🏴󠁶󠁮󠀳󠀷󠁿 Flag for Tây Ninh (VN-37) 🏴󠁶󠁮󠀳󠀰󠁿 Flag for Gia Lai (VN-30) 🏴󠁶󠁮󠀷󠀲󠁿 Flag for Đắk Nông (VN-72) 🏴󠁶󠁮󠀴󠀰󠁿 Flag for Bình Thuận (VN-40) 🏴󠁶󠁮󠀴󠀱󠁿 Flag for Long An (VN-41) 🏴󠁶󠁮󠀳󠀱󠁿 Flag for Bình Định (VN-31) 🏴󠁷󠁦󠁵󠁶󠁿 Flag for Uvea (WF-UV) 🏴󠁹󠁥󠁳󠁤󠁿 Flag for Sa’dah (YE-SD) 🏴󠁹󠁥󠁡󠁢󠁿 Flag for Abyan (YE-AB) 🏴󠁹󠁥󠁨󠁪󠁿 Flag for Hajjah (YE-HJ) 🏴󠁶󠁵󠁭󠁡󠁰󠁿 Flag for Malampa (VU-MAP) 🏴󠁷󠁳󠁡󠁴󠁿 Flag for Atua (WS-AT) 🏴󠁷󠁳󠁶󠁦󠁿 Flag for Va’a-o-Fonoti (WS-VF) 🏴󠁹󠁥󠁨󠁵󠁿 Flag for Al Hudaydah (YE-HU) 🏴󠁷󠁳󠁰󠁡󠁿 Flag for Palauli (WS-PA) 🏴󠁷󠁳󠁳󠁡󠁿 Flag for Satupa’itea (WS-SA) 🏴󠁹󠁥󠁤󠁡󠁿 Flag for Dhale (YE-DA) 🏴󠁭󠁬󠀶󠁿 Flag for Tombouctou (ML-6) 🏴󠁹󠁥󠁲󠁡󠁿 Flag for Raymah (YE-RA) 🏴󠁶󠁵󠁳󠁡󠁭󠁿 Flag for Sanma (VU-SAM) 🏴󠁷󠁦󠁡󠁬󠁿 Flag for Alo (WF-AL) 🏴󠁹󠁥󠁭󠁲󠁿 Flag for Al Mahrah (YE-MR) 👨🏻‍👨🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone 🏴󠁹󠁥󠁡󠁤󠁿 Flag for ’Adan (YE-AD) 🏴󠁹󠁥󠁳󠁨󠁿 Flag for Shabwah (YE-SH) 🏴󠁶󠁵󠁴󠁡󠁥󠁿 Flag for Tafea (VU-TAE) 🏴󠁹󠁥󠁡󠁭󠁿 Flag for Amran (YE-AM) 🏴󠁶󠁵󠁰󠁡󠁭󠁿 Flag for Penama (VU-PAM) 🏴󠁹󠁥󠁭󠁷󠁿 Flag for Al Mahwit (YE-MW) 🏴󠁷󠁳󠁧󠁥󠁿 Flag for Gaga’emauga (WS-GE) 🏴󠁹󠁥󠁨󠁤󠁿 Flag for Hadramaut (YE-HD) 🏴󠁷󠁳󠁡󠁬󠁿 Flag for Aiga-i-le-Tai (WS-AL) 🏴󠁹󠁥󠁭󠁡󠁿 Flag for Ma’rib (YE-MA) 🏴󠁹󠁥󠁢󠁡󠁿 Flag 
for Al Bayda (YE-BA) 🏴󠁶󠁮󠁨󠁰󠁿 Flag for Haiphong (VN-HP) 🏴󠁷󠁳󠁡󠁡󠁿 Flag for A’ana (WS-AA) 🏴󠁷󠁦󠁳󠁧󠁿 Flag for Sigave (WF-SG) 🏴󠁹󠁥󠁬󠁡󠁿 Flag for Lahij (YE-LA) 🏴󠁶󠁵󠁳󠁥󠁥󠁿 Flag for Shefa (VU-SEE) 🏴󠁹󠁥󠁩󠁢󠁿 Flag for Ibb (YE-IB) 🏴󠁶󠁵󠁴󠁯󠁢󠁿 Flag for Torba (VU-TOB) 🏴󠁹󠁥󠁪󠁡󠁿 Flag for Al Jawf (YE-JA) 🏴󠁷󠁳󠁴󠁵󠁿 Flag for Tuamasaga (WS-TU) 🏴󠁹󠁥󠁤󠁨󠁿 Flag for Dhamar (YE-DH) 🏴󠁺󠁡󠁷󠁣󠁿 Flag for Western Cape (ZA-WC) 🏴󠁹󠁥󠁳󠁵󠁿 Flag for Arkhabil Suqutra (YE-SU) 🏴󠁺󠁷󠁭󠁮󠁿 Flag for Matabeleland North (ZW-MN) 🏴󠁺󠁷󠁭󠁥󠁿 Flag for Mashonaland East (ZW-ME) 🏴󠁺󠁭󠀰󠀶󠁿 Flag for North-Western (ZM-06) 🏴󠁹󠁥󠁳󠁮󠁿 Flag for Sana’a (YE-SN) 🏴󠁺󠁡󠁬󠁰󠁿 Flag for Limpopo (ZA-LP) 🏴󠁺󠁭󠀰󠀳󠁿 Flag for Eastern (ZM-03) 🏴󠁺󠁷󠁭󠁩󠁿 Flag for Midlands (ZW-MI) 🏴󠁺󠁷󠁢󠁵󠁿 Flag for Bulawayo (ZW-BU) 🏴󠁺󠁭󠀰󠀵󠁿 Flag for Northern (ZM-05) 🏴󠁺󠁭󠀰󠀷󠁿 Flag for Southern (ZM-07) 🏴󠁺󠁡󠁦󠁳󠁿 Flag for Free (ZA-FS) 🏴󠁺󠁷󠁭󠁳󠁿 Flag for Matabeleland South (ZW-MS) 🏴󠁺󠁡󠁥󠁣󠁿 Flag for Eastern Cape (ZA-EC) 🏴󠁺󠁭󠀰󠀱󠁿 Flag for Western (ZM-01) 👨🏼‍👨🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁺󠁭󠀰󠀸󠁿 Flag for Copperbelt (ZM-08) 🏴󠁺󠁡󠁮󠁷󠁿 Flag for North West (ZA-NW) 🏴󠁺󠁭󠀱󠀰󠁿 Flag for Muchinga (ZM-10) 🏴󠁺󠁡󠁧󠁴󠁿 Flag for Gauteng (ZA-GT) 🏴󠁺󠁭󠀰󠀹󠁿 Flag for Lusaka (ZM-09) 🏴󠁺󠁭󠀰󠀲󠁿 Flag for Central (ZM-02) 🏴󠁺󠁡󠁮󠁣󠁿 Flag for Northern Cape (ZA-NC) 🏴󠁺󠁡󠁭󠁰󠁿 Flag for Mpumalanga (ZA-MP) 🏴󠁹󠁥󠁴󠁡󠁿 Flag for Taiz (YE-TA) 🏴󠁺󠁡󠁮󠁬󠁿 Flag for KwaZulu-Natal (ZA-NL) 🏴󠁺󠁷󠁭󠁡󠁿 Flag for Manicaland (ZW-MA) 🏴󠁺󠁷󠁭󠁶󠁿 Flag for Masvingo (ZW-MV) 🏴󠁺󠁭󠀰󠀴󠁿 Flag for Luapula (ZM-04) 🏴󠁺󠁷󠁭󠁷󠁿 Flag for Mashonaland West (ZW-MW) 🏴󠁺󠁷󠁨󠁡󠁿 Flag for Harare (ZW-HA) 👨🏽‍👨🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone 👨🏾‍👨🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁦󠁲󠁰󠁤󠁬󠁿 Flag for Pays-de-la-Loire (FR-PDL) 🏴󠁬󠁴󠀲󠀰󠁿 Flag for Klaipėdos Municipality (LT-20) 🏴󠁧󠁲󠁭󠁿 Flag for Crete (GR-M) 󠁸 Tag Latin Small Letter X 🏴󠁩󠁲󠀲󠀱󠁿 Flag for Mazandaran (IR-21) 🏴󠁲󠁵󠁰󠁲󠁩󠁿 Flag for Primorsky Krai (RU-PRI) 🏴󠁪󠁰󠀰󠀷󠁿 Flag for Fukushima (JP-07) 🏴󠁣󠁡󠁭󠁢󠁿 Flag for Manitoba (CA-MB) 
👨🏻‍👨🏻‍👦🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 👩🏻‍❤️‍👩🏻 Couple With Heart - Woman: Light Skin Tone, Woman: Light Skin Tone 🏴󠁣󠁡󠁱󠁣󠁿 Flag for Quebec (CA-QC) 👨‍👩‍👶 Family: Man, Woman, Baby 🏴󠁮󠁡󠁫󠁥󠁿 Flag for Kavango East (NA-KE) 🏴󠁭󠁸󠁳󠁬󠁰󠁿 Flag for San Luis Potosí (MX-SLP) 🏴󠁥󠁥󠀵󠀹󠁿 Flag for Lääne-Viru (EE-59) 🏴󠁬󠁲󠁢󠁧󠁿 Flag for Bong (LR-BG) 🏴󠁰󠁳󠁤󠁥󠁢󠁿 Flag for Deir al-Balah (PS-DEB) 👨🏿‍👨🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁪󠁭󠀰󠀳󠁿 Flag for Saint Thomas (JM-03) 🏴󠁰󠁷󠀱󠀰󠀰󠁿 Flag for Kayangel (PW-100) 🏴󠁣󠁧󠀱󠀲󠁿 Flag for Pool (CG-12) 👨‍❤️‍👨🏾 Couple With Heart - Man, Man: Medium-Dark Skin Tone 🏴󠁥󠁳󠁩󠁢󠁿 Flag for Balearic Islands (ES-IB) 👩‍👨‍👦 Family: Woman, Man, Boy 🏴󠁦󠁩󠀱󠀸󠁿 Flag for Uusimaa (FI-18) 👨🏻‍👩🏻‍👦🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁲󠁣󠁥󠁿 Flag for Ceará (BR-CE) 👨‍👩‍👦‍👶 Family: Man, Woman, Boy, Baby 👨🏻‍👨🏻‍👧🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁭󠁫󠀲󠀵󠁿 Flag for Demir Hisar (MK-25) 🏴󠁣󠁬󠁡󠁮󠁿 Flag for Antofagasta (CL-AN) 🏴󠁢󠁢󠀰󠀱󠁿 Flag for Christ Church (BB-01) 🏴󠁥󠁥󠀳󠀷󠁿 Flag for Harju (EE-37) 👨🏿‍❤️‍💋‍👩🏽 Kiss - Man: Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁮󠁲󠀱󠀴󠁿 Flag for Yaren (NR-14) 👩‍❤️‍👩🏻 Couple With Heart - Woman, Woman: Light Skin Tone 🏴󠁭󠁹󠀱󠀰󠁿 Flag for Selangor (MY-10) 👨🏼‍👨🏼‍👧🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁰󠁥󠁡󠁰󠁵󠁿 Flag for Apurímac (PE-APU) 👩‍👨‍👦‍👧 Family: Woman, Man, Boy, Girl 👨🏿‍👩🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁧󠁥󠁡󠁢󠁿 Flag for Abkhazia (GE-AB) 🏴󠁬󠁩󠀰󠀸󠁿 Flag for Schellenberg (LI-08) 🏴󠁴󠁲󠀸󠀱󠁿 Flag for Düzce (TR-81) 👩🏾‍👧🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩‍👨‍👶‍👦 Family: Woman, Man, Baby, Boy 🏴󠁭󠁸󠁳󠁯󠁮󠁿 Flag for Sonora (MX-SON) 🏴󠁣󠁩󠁳󠁭󠁿 Flag for Sassandra-Marahoué (CI-SM) 
🏴󠁰󠁥󠁡󠁲󠁥󠁿 Flag for Arequipa (PE-ARE) 👩🏽‍❤️‍👩🏼 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁣󠁧󠀱󠀱󠁿 Flag for Bouenza (CG-11) 🏴󠁪󠁭󠀱󠀴󠁿 Flag for Saint Catherine (JM-14) 🏴󠁳󠁩󠀱󠀲󠀲󠁿 Flag for Škofja Loka (SI-122) 👩🏻‍❤️‍💋‍👨🏼 Kiss - Woman: Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁴󠁷󠁨󠁳󠁺󠁿 Flag for Hsinchu (TW-HSZ) 👩🏼‍👧🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁬󠁫󠀳󠁿 Flag for Southern (LK-3) 👨‍❤️‍💋‍👨🏼 Kiss - Man, Man: Medium-Light Skin Tone 👨🏽‍👨🏽‍👧🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁮󠁩󠁬󠁥󠁿 Flag for León (NI-LE) 🏴󠁨󠁲󠀰󠀵󠁿 Flag for Varaždin (HR-05) 🏴󠁣󠁯󠁡󠁮󠁴󠁿 Flag for Antioquia (CO-ANT) 🏴󠁭󠁣󠁳󠁤󠁿 Flag for Sainte-Dévote Chapel (MC-SD) 🏴󠁭󠁫󠀶󠀱󠁿 Flag for Plasnica (MK-61) 👨🏾‍❤️‍👨🏻 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Light Skin Tone 🏴󠁧󠁲󠁧󠁿 Flag for West Greece (GR-G) 🏴󠁭󠁶󠁮󠁯󠁿 Flag for North Province (MV-NO) 👨‍❤️‍👩🏻 Couple With Heart - Man, Woman: Light Skin Tone 🏴󠁶󠁥󠁣󠁿 Flag for Apure (VE-C) ☿️ Mercury 🏴󠁵󠁳󠁭󠁴󠁿 Flag for Montana (US-MT) 👩🏼‍❤️‍👨🏾 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏾‍👨🏾‍👧🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁥󠁣󠁥󠁿 Flag for Esmeraldas (EC-E) 🏴󠁤󠁺󠀰󠀸󠁿 Flag for Béchar (DZ-08) 🏴󠁮󠁬󠁮󠁨󠁿 Flag for North Holland (NL-NH) 🏴󠁦󠁲󠁢󠁬󠁿 Flag for St. 
Barthélemy (FR-BL) 🏴󠁣󠁦󠁵󠁫󠁿 Flag for Ouaka (CF-UK) 🏴󠁳󠁤󠁲󠁳󠁿 Flag for Red Sea (SD-RS) 🏴󠁭󠁸󠁴󠁡󠁢󠁿 Flag for Tabasco (MX-TAB) 🏴󠁣󠁮󠀹󠀲󠁿 Flag for Macau SAR China (CN-92) 🏴󠁨󠁵󠁥󠁧󠁿 Flag for Eger (HU-EG) 🏴󠁲󠁵󠁳󠁥󠁿 Flag for North Ossetia-Alania (RU-SE) 🏴󠁣󠁤󠁥󠁱󠁿 Flag for Équateur (CD-EQ) 👨🏿‍👨🏿‍👧🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁥󠁳󠁰󠁶󠁿 Flag for Basque Country (ES-PV) 👨🏽‍❤️‍💋‍👨🏻 Kiss - Man: Medium Skin Tone, Man: Light Skin Tone 🏴󠁴󠁮󠀷󠀱󠁿 Flag for Gafsa (TN-71) 🏴󠁦󠁩󠀰󠀶󠁿 Flag for Tavastia Proper (FI-06) 🏴󠁩󠁲󠀳󠀰󠁿 Flag for Razavi Khorasan (IR-30) 🏴󠁳󠁩󠀱󠀵󠀴󠁿 Flag for Dobje (SI-154) 👨🏼‍❤️‍💋‍👨🏻 Kiss - Man: Medium-Light Skin Tone, Man: Light Skin Tone 🏴󠁧󠁴󠁲󠁥󠁿 Flag for Retalhuleu (GT-RE) 🏴󠁫󠁩󠁬󠁿 Flag for Line Islands (KI-L) 🏴󠁩󠁲󠀰󠀲󠁿 Flag for West Azarbaijan (IR-02) 🏴󠁣󠁯󠁮󠁡󠁲󠁿 Flag for Nariño (CO-NAR) 🏴󠁺󠁷󠁭󠁣󠁿 Flag for Mashonaland Central (ZW-MC) 👨🏻‍❤️‍👨🏻 Couple With Heart - Man: Light Skin Tone, Man: Light Skin Tone 🏴󠁩󠁴󠀴󠀵󠁿 Flag for Emilia-Romagna (IT-45) 🏴󠁥󠁳󠁶󠁣󠁿 Flag for Valencian Community (ES-VC) 🏴󠁴󠁨󠀷󠀵󠁿 Flag for Samut Songkhram (TH-75) 🏴󠁦󠁲󠁩󠁤󠁦󠁿 Flag for Île-de-France (FR-IDF) 🏴󠁬󠁳󠁡󠁿 Flag for Maseru (LS-A) 🏴󠁫󠁥󠀲󠀵󠁿 Flag for Marsabit (KE-25) 🏴󠁤󠁺󠀰󠀱󠁿 Flag for Adrar (DZ-01) 🏴󠁳󠁶󠁵󠁳󠁿 Flag for Usulután (SV-US) 🏴󠁬󠁶󠀰󠀶󠀰󠁿 Flag for Mazsalaca (LV-060) 👩🏻‍❤️‍💋‍👩🏾 Kiss - Woman: Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏾‍👦🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁴󠁨󠀳󠀶󠁿 Flag for Chaiyaphum (TH-36) 🏴󠁰󠁨󠀰󠀷󠁿 Flag for Central Visayas (PH-07) 🏴󠁴󠁨󠀸󠀶󠁿 Flag for Chumphon (TH-86) 🏴󠁣󠁩󠁺󠁺󠁿 Flag for Zanzan (CI-ZZ) 🏴󠁥󠁳󠁣󠁬󠁿 Flag for Castile and León (ES-CL) 👨🏻‍👨🏻‍👧🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 🏴󠁳󠁡󠀱󠀱󠁿 Flag for Al Bahah (SA-11) 🏴󠁢󠁱󠁳󠁥󠁿 Flag for Sint Eustatius (BQ-SE) 🏴󠁦󠁩󠀰󠀱󠁿 Flag for Åland Islands (FI-01) 🏴󠁣󠁲󠁨󠁿 Flag for Heredia (CR-H) 🏴󠁴󠁲󠀴󠀳󠁿 Flag for Kütahya (TR-43) 🏴󠁷󠁳󠁶󠁳󠁿 Flag for Vaisigano (WS-VS) 👨🏿‍❤️‍💋‍👩🏼 Kiss - Man: Dark Skin Tone, Woman: Medium-Light Skin Tone 
🏴󠁳󠁩󠀰󠀵󠀲󠁿 Flag for Kranj (SI-052) 🏴󠁶󠁥󠁶󠁿 Flag for Zulia (VE-V) 👩🏽‍❤️‍💋‍👨🏼 Kiss - Woman: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁬󠁵󠁣󠁡󠁿 Flag for Capellen (LU-CA) 👩🏽‍❤️‍👩🏾 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium-Dark Skin Tone 👨🏼‍👨🏼‍👧🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁧󠁹󠁥󠁢󠁿 Flag for East Berbice-Corentyne (GY-EB) 🏴󠁴󠁨󠀱󠀶󠁿 Flag for Lopburi (TH-16) 🏴󠁭󠁴󠀲󠀵󠁿 Flag for Luqa (MT-25) 👨🏻‍❤️‍👨🏼 Couple With Heart - Man: Light Skin Tone, Man: Medium-Light Skin Tone 👨🏽‍👨🏽‍👧🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩🏻‍❤️‍👩🏽 Couple With Heart - Woman: Light Skin Tone, Woman: Medium Skin Tone 🏴󠁭󠁸󠁢󠁣󠁳󠁿 Flag for Baja California Sur (MX-BCS) 🏴󠁥󠁧󠁢󠁮󠁳󠁿 Flag for Beni Suef (EG-BNS) 🏴󠁴󠁨󠀹󠀳󠁿 Flag for Phatthalung (TH-93) 🏴󠁴󠁺󠀲󠀵󠁿 Flag for Tanga (TZ-25) 🏴󠁭󠁡󠀰󠀴󠁿 Flag for Oriental (MA-04) 👨🏾‍👨🏾‍👧🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏿‍👩🏿‍👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁳󠁩󠀰󠀲󠀷󠁿 Flag for Gorenja Vas–Poljane (SI-027) 🏴󠁴󠁴󠁳󠁧󠁥󠁿 Flag for Sangre Grande (TT-SGE) 🏴󠁬󠁶󠀰󠀴󠀶󠁿 Flag for Koknese (LV-046) 🏴󠁳󠁩󠀰󠀸󠀶󠁿 Flag for Odranci (SI-086) 🏴󠁮󠁺󠁮󠁳󠁮󠁿 Flag for Nelson (NZ-NSN) 🏴󠁨󠁵󠁳󠁺󠁿 Flag for Szabolcs-Szatmár-Bereg (HU-SZ) 👩🏾‍❤️‍💋‍👨🏽 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium Skin Tone 🏴󠁳󠁩󠀲󠀱󠀰󠁿 Flag for Sveti Jurij v Slovenskih Goricah (SI-210) ߷ NKo Symbol Gbakurunen 🏴󠁮󠁧󠁤󠁥󠁿 Flag for Delta (NG-DE) 🏴󠁭󠁤󠁣󠁳󠁿 Flag for Căușeni (MD-CS) 👩🏽‍👧🏽‍👦🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁣󠁵󠀹󠀹󠁿 Flag for Isla de la Juventud (CU-99) 🏴󠁫󠁨󠀲󠀰󠁿 Flag for Svay Rieng (KH-20) 🏴󠁴󠁤󠁨󠁬󠁿 Flag for Hadjer-Lamis (TD-HL) 🏴󠁪󠁰󠀲󠀱󠁿 Flag for Gifu (JP-21) 🏴󠁬󠁶󠀰󠀴󠀱󠁿 Flag for Jelgava Municipality (LV-041) 🏴󠁰󠁫󠁴󠁡󠁿 Flag for Federally Administered Tribal Areas (PK-TA) 🏴󠁭󠁴󠀶󠀲󠁿 Flag for Xewkija (MT-62) 🏴󠁭󠁲󠀱󠀰󠁿 Flag for 
Guidimaka (MR-10) 🏴󠁭󠁫󠀰󠀲󠁿 Flag for Aračinovo (MK-02) 🏴󠁳󠁩󠀲󠀰󠀸󠁿 Flag for Log–Dragomer (SI-208) 🏴󠁳󠁩󠀱󠀲󠀵󠁿 Flag for Šmartno ob Paki (SI-125) 🏴󠁣󠁯󠁤󠁣󠁿 Flag for Capital District (CO-DC) 🏴󠁬󠁶󠀱󠀰󠀶󠁿 Flag for Ventspils Municipality (LV-106) 🏴󠁭󠁶󠁳󠁣󠁿 Flag for South Central Province (MV-SC) 🏴󠁩󠁮󠁡󠁳󠁿 Flag for Assam (IN-AS) 🏴󠁬󠁴󠀰󠀲󠁿 Flag for Alytus Municipality (LT-02) 🏴󠁶󠁮󠀶󠀶󠁿 Flag for Hưng Yên (VN-66) 👨🏻‍👨🏻‍👧🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👨🏼‍👨🏼‍👧🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁧󠁴󠁳󠁭󠁿 Flag for San Marcos (GT-SM) 👨🏼‍👨🏼‍👦🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁤󠁥󠁳󠁨󠁿 Flag for Schleswig-Holstein (DE-SH) 👨‍👨‍👶‍👧 Family: Man, Man, Baby, Girl ️ Variation Selector-16 👨🏽‍👨🏽‍👧🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👨🏾‍👧🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👧🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👨🏻‍👨🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone 👨🏼‍👨🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏽‍👨🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👨🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone 👩‍❤️‍👨🏿 Couple With Heart - Woman, Man: Dark Skin Tone 🏴󠁥󠁳󠁣󠁢󠁿 Flag for Cantabria (ES-CB) 🏴󠁳󠁳󠁵󠁹󠁿 Flag for Unity (SS-UY) 👩🏼‍👶🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👶🏽‍👦🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin 
Tone, Boy: Medium Skin Tone 👩🏾‍👶🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👶🏿‍👦🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👶🏻‍👧🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👶🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍👶🏽‍👧🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👶🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏽‍👶🏽‍👶🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👶🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👶🏿‍👶🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👨🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👨🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👨🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👨🏻‍👦🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👶🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏼‍👨🏼‍👦🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👨🏽‍👦🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👦🏿‍👦🏿 Family - Woman: Dark Skin Tone, 
Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 👩🏽‍👨🏽‍👦🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👦🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 👩🏼‍👨🏼‍👦🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👨🏽‍👦🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏻‍👨🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👨🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone 👩🏻‍👨🏻‍👦🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👦🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏼‍👨🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏻‍👨🏻‍👧🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👨🏼‍👧🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👨🏽‍👧🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👨🏾‍👧🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👧🏿‍👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👨🏻‍👧🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: 
Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👨🏽‍👧🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩🏿‍👨🏿‍👧🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👨🏻‍👧🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👧🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👨🏽‍👧🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👧🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👧🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👨🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏻‍👨🏻‍👶🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👨🏼‍👶🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏾‍👨🏾‍👶🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👶🏿‍👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👨🏻‍👶🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👨🏼‍👶🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍👨🏽‍👶🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏿‍👨🏿‍👶🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: 
Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👨🏻‍👶🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👶🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👨🏽‍👶🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👶🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👶🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 👩🏼‍👩🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏻‍👩🏻‍👦🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👩🏼‍👦🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏾‍👩🏾‍👦🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👦🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 👩🏿‍👩🏿‍👦🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 👩🏽‍👩🏽‍👦🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👩🏾‍👦🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👦🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👦🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👩🏼‍👦🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin 
Tone 👩🏽‍👩🏽‍👦🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👦🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏻‍👩🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👩🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍👩🏽‍👦🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏽‍👩🏽‍👧🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏻‍👩🏻‍👧🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👩🏽‍👧🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👩🏾‍👧🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👧🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👧🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👩🏼‍👧🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👩🏽‍👧🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👧🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👧🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone 👨🏾‍👩🏾‍👧🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: 
Medium-Dark Skin Tone 👩🏼‍👩🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👩🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👩🏼‍👶🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👩🏽‍👶🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👩🏽‍👶🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👩🏼‍👶🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👩🏽‍👶🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: 
Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 👩🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁩󠁤󠁭󠁬󠁿 Flag for Maluku Islands (ID-ML) 👩🏿‍👶🏿‍👧🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁤󠁫󠀸󠀳󠁿 Flag for Southern Denmark (DK-83) 🏴󠁭󠁫󠀸󠀵󠁿 Flag for Skopje (MK-85) 👨🏼‍❤️‍💋‍👩 Kiss - Man: Medium-Light Skin Tone, Woman 🏴󠁰󠁴󠀰󠀲󠁿 Flag for Beja (PT-02) 🏴󠁩󠁴󠀸󠀸󠁿 Flag for Sardinia (IT-88) 🏴󠁤󠁥󠁢󠁹󠁿 Flag for Bavaria (DE-BY) 🏴󠁰󠁧󠁥󠁢󠁲󠁿 Flag for East New Britain (PG-EBR) 🏴󠁩󠁴󠀳󠀲󠁿 Flag for Trentino-South Tyrol (IT-32) 🏴󠁵󠁳󠁴󠁮󠁿 Flag for Tennessee (US-TN) 🏴󠁣󠁡󠁳󠁫󠁿 Flag for Saskatchewan (CA-SK) 🏴󠁴󠁶󠁦󠁵󠁮󠁿 Flag for Funafuti (TV-FUN) 🏴󠁴󠁪󠁧󠁢󠁿 Flag for Gorno-Badakhshan (TJ-GB) 🏴󠁳󠁯󠁢󠁮󠁿 Flag for Banaadir (SO-BN) 🏴󠁳󠁩󠀱󠀰󠀰󠁿 Flag for Radenci (SI-100) 🏴󠁤󠁥󠁢󠁷󠁿 Flag for Baden-Württemberg (DE-BW) 👩🏿‍👧🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁶󠁥󠁧󠁿 Flag for Carabobo (VE-G) ‍ Zero Width Joiner 🏴󠁫󠁥󠀳󠀱󠁿 Flag for Nakuru (KE-31) 🏴󠁴󠁧󠁭󠁿 Flag for Maritime (TG-M) 🏴󠁮󠁧󠁢󠁯󠁿 Flag for Borno (NG-BO) 🏴󠁭󠁤󠁳󠁮󠁿 Flag for Transnistria (MD-SN) 🏴󠁩󠁲󠀰󠀷󠁿 Flag for Tehran (IR-07) 🏴󠁲󠁵󠁤󠁡󠁿 Flag for Dagestan (RU-DA) 🏴󠁯󠁭󠁷󠁵󠁿 Flag for Al Wusta (OM-WU) 🏴󠁣󠁺󠀴󠀲󠁿 Flag for Ústecký kraj (CZ-42) 🏴󠁭󠁹󠀱󠀴󠁿 Flag for Kuala Lumpur (MY-14) 🏴󠁰󠁥󠁡󠁹󠁡󠁿 Flag for Ayacucho (PE-AYA) 🏴󠁵󠁡󠀳󠀰󠁿 Flag for Kiev (UA-30) 🏴󠁡󠁧󠀰󠀸󠁿 Flag for Saint Philip (AG-08) 🏴󠁭󠁴󠀲󠀹󠁿 Flag for Mdina (MT-29) 🏴󠁧󠁢󠁮󠁩󠁲󠁿 Flag for Northern Ireland (GB-NIR) 🏴󠁦󠁲󠁡󠁲󠁡󠁿 Flag for Auvergne-Rhône-Alpes (FR-ARA) 🏴󠁭󠁸󠁤󠁵󠁲󠁿 Flag for Durango (MX-DUR) 👨🏼‍👩🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁬󠁫󠀵󠁿 Flag for Eastern (LK-5) 🏴󠁮󠁧󠁯󠁧󠁿 Flag for Ogun (NG-OG) 🏴󠁬󠁹󠁪󠁩󠁿 Flag for Jafara (LY-JI) 🏴󠁳󠁥󠁭󠁿 Flag for Skåne (SE-M) 👨🏽‍👩🏽‍👧🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👩🏾‍👧🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark 
Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁢󠁲󠁭󠁳󠁿 Flag for Mato Grosso do Sul (BR-MS) 🏴󠁧󠁴󠁳󠁲󠁿 Flag for Santa Rosa (GT-SR) 👨🏼‍👩🏼‍👧🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁳󠁩󠀱󠀵󠀱󠁿 Flag for Braslovče (SI-151) 🏴󠁰󠁴󠀳󠀰󠁿 Flag for Madeira (PT-30) 🏴󠁳󠁶󠁳󠁶󠁿 Flag for San Vicente (SV-SV) 🏴󠁩󠁲󠀳󠀲󠁿 Flag for Alborz (IR-32) 🏴󠁷󠁳󠁦󠁡󠁿 Flag for Fa’asaleleaga (WS-FA) 👨🏼‍👨🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁣󠁡󠁮󠁬󠁿 Flag for Newfoundland and Labrador (CA-NL) 🏴󠁧󠁲󠁪󠁿 Flag for Peloponnese (GR-J) 🏴󠁮󠁬󠁳󠁸󠁿 Flag for Sint Maarten (NL-SX) 🏴󠁭󠁴󠀴󠀸󠁿 Flag for St. Julian’s (MT-48) 🏴󠁮󠁧󠁡󠁤󠁿 Flag for Adamawa (NG-AD) 👩🏿‍👩🏿‍👧🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁳󠁴󠁳󠁿 Flag for São Tomé (ST-S) 👩🏻‍👩🏻‍👧🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁬󠁶󠀰󠀱󠀰󠁿 Flag for Auce (LV-010) 🏴󠁰󠁨󠀱󠀵󠁿 Flag for Cordillera Administrative (PH-15) 🏴󠁪󠁰󠀱󠀸󠁿 Flag for Fukui (JP-18) 👨🏿‍👩🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁧󠁥󠁫󠁡󠁿 Flag for Kakheti (GE-KA) 🏴󠁫󠁲󠀴󠀹󠁿 Flag for Jeju (KR-49) 🏴󠁭󠁡󠀱󠀳󠁿 Flag for Souss-Massa-Drâa (MA-13) 🏴󠁬󠁶󠀰󠀳󠀷󠁿 Flag for Inčukalns (LV-037) 🏴󠁦󠁲󠁴󠁦󠁿 Flag for French Southern Territories (FR-TF) 🏴󠁭󠁸󠁲󠁯󠁯󠁿 Flag for Quintana Roo (MX-ROO) 👩🏻‍👶🏻‍👶🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 👨🏾‍👨🏾‍👦🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁨󠁵󠁧󠁳󠁿 Flag for Győr-Moson-Sopron (HU-GS) 👩🏿‍👩🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👦🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone  Shibuya 👩‍❤️‍👨🏽 Couple With Heart - Woman, Man: Medium Skin Tone 🏴󠁷󠁳󠁧󠁩󠁿 Flag for Gaga’ifomauga (WS-GI) 🏴󠁨󠁴󠁮󠁥󠁿 Flag for Nord-Est (HT-NE) 
🏴󠁳󠁧󠀰󠀱󠁿 Flag for Central Singapore (SG-01) 🏴󠁥󠁣󠁴󠁿 Flag for Tungurahua (EC-T) # Number Sign 👨🏻‍👨🏻‍👶🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 1 Digit One 🏴󠁢󠁯󠁴󠁿 Flag for Tarija (BO-T) 👨🏾‍👩🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁢󠁩󠁣󠁩󠁿 Flag for Cibitoke (BI-CI) 🏴󠁭󠁶󠁵󠁳󠁿 Flag for Upper South Province (MV-US) 🏴󠁡󠁤󠀰󠀲󠁿 Flag for Canillo (AD-02) 🏴󠁡󠁦󠁢󠁡󠁭󠁿 Flag for Bamyan (AF-BAM) 🏴󠁡󠁤󠀰󠀳󠁿 Flag for Encamp (AD-03) 🏴󠁵󠁳󠁭󠁰󠁿 Flag for Northern Mariana Islands (US-MP) 🏴󠁬󠁶󠀰󠀱󠀲󠁿 Flag for Babīte (LV-012) 🏴󠁥󠁣󠁸󠁿 Flag for Cotopaxi (EC-X) 🏴󠁧󠁡󠀴󠁿 Flag for Ngounié (GA-4) * Asterisk 󠁺 Tag Latin Small Letter Z 🏴󠁡󠁤󠀰󠀴󠁿 Flag for La Massana (AD-04) 󠀳 Tag Digit Three 👩🏼‍❤️‍💋‍👩🏻 Kiss - Woman: Medium-Light Skin Tone, Woman: Light Skin Tone 🏴󠁭󠁥󠀰󠀳󠁿 Flag for Berane (ME-03) 👨🏿‍❤️‍💋‍👨🏽 Kiss - Man: Dark Skin Tone, Man: Medium Skin Tone 🏴󠁤󠁯󠀳󠀷󠁿 Flag for El Valle (DO-37) 👩🏾‍❤️‍👩🏻 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Light Skin Tone 🏴󠁫󠁥󠀰󠀱󠁿 Flag for Baringo (KE-01) 🏴󠁹󠁥󠁳󠁡󠁿 Flag for Amanat Al Asimah (YE-SA) 👨🏼‍👨🏼‍👶🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 󠀲 Tag Digit Two 🏴󠁭󠁴󠀲󠀰󠁿 Flag for Senglea (MT-20) 🕴️‍♀️ Woman in Business Suit Levitating 🏴󠁣󠁦󠁨󠁭󠁿 Flag for Haut-Mbomou (CF-HM) 󠀱 Tag Digit One 󠀴 Tag Digit Four 🏴󠁡󠁺󠁡󠁢󠁳󠁿 Flag for Absheron (AZ-ABS) 6 Digit Six 🏴󠁬󠁡󠁳󠁶󠁿 Flag for Savannakhet (LA-SV) 🏴󠁭󠁬󠀱󠁿 Flag for Kayes (ML-1) 🏴󠁡󠁥󠁡󠁺󠁿 Flag for Abu Dhabi (AE-AZ) 🏴󠁥󠁳󠁡󠁳󠁿 Flag for Asturias (ES-AS) 🏴󠁩󠁱󠁫󠁩󠁿 Flag for Kirkuk (IQ-KI) 👩‍❤️‍👩🏽 Couple With Heart - Woman, Woman: Medium Skin Tone 🏴󠁤󠁥󠁢󠁥󠁿 Flag for Berlin (DE-BE) 8 Digit Eight 🏴󠁡󠁤󠀰󠀸󠁿 Flag for Escaldes-Engordany (AD-08) 🏴󠁣󠁮󠀶󠀴󠁿 Flag for Ningxia (CN-64) 🏴󠁥󠁣󠁦󠁿 Flag for Cañar (EC-F) 🏴󠁡󠁥󠁡󠁪󠁿 Flag for Ajman (AE-AJ) 🕴🏻‍♀️ Woman in Business Suit Levitating: Light Skin Tone 👨🏻‍❤️‍💋‍👩 Kiss - Man: Light Skin Tone, Woman 󠀸 Tag Digit Eight 🏴󠁩󠁲󠀱󠀴󠁿 Flag for Fars (IR-14) 🏴󠁡󠁥󠁦󠁵󠁿 
Flag for Fujairah (AE-FU) 👨🏼‍👦🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁨󠁲󠀱󠀰󠁿 Flag for Virovitica-Podravina (HR-10) 󠁩 Tag Latin Small Letter I 7 Digit Seven 󠀷 Tag Digit Seven 󠁥 Tag Latin Small Letter E 👩🏼‍👩🏼‍👧🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁭󠁨󠁴󠁿 Flag for Ratak Chain (MH-T) 🏴󠁡󠁥󠁳󠁨󠁿 Flag for Sharjah (AE-SH) 󠁦 Tag Latin Small Letter F 🏴󠁬󠁴󠀵󠀷󠁿 Flag for Vilniaus Municipality (LT-57) 🏴󠁩󠁳󠀴󠁿 Flag for Westfjords (IS-4) 🏴󠁣󠁡󠁢󠁣󠁿 Flag for British Columbia (CA-BC) 4 Digit Four 🏴󠁡󠁦󠁢󠁡󠁬󠁿 Flag for Balkh (AF-BAL) 👨‍👶‍👦 Family: Man, Baby, Boy 🏴󠁴󠁷󠁨󠁳󠁱󠁿 Flag for Hsinchu County (TW-HSQ) 👩‍👶‍👧 Family: Woman, Baby, Girl 🏴󠁭󠁸󠁪󠁡󠁬󠁿 Flag for Jalisco (MX-JAL) 🏴󠁫󠁥󠀱󠀸󠁿 Flag for Kitui (KE-18) 🏴󠁰󠁴󠀲󠀰󠁿 Flag for Azores (PT-20) 🏴󠁩󠁮󠁭󠁮󠁿 Flag for Manipur (IN-MN) 🏴󠁡󠁦󠁢󠁤󠁳󠁿 Flag for Badakhshan (AF-BDS) 👩🏻‍❤️‍👩🏼 Couple With Heart - Woman: Light Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁡󠁤󠀰󠀵󠁿 Flag for Ordino (AD-05) 👩🏽‍❤️‍💋‍👩 Kiss - Woman: Medium Skin Tone, Woman 🏴󠁡󠁦󠁢󠁧󠁬󠁿 Flag for Baghlan (AF-BGL) 🏴󠁮󠁧󠁣󠁲󠁿 Flag for Cross River (NG-CR) 🏴󠁵󠁳󠁣󠁯󠁿 Flag for Colorado (US-CO) 󠁴 Tag Latin Small Letter T 🏴󠁭󠁫󠀶󠀴󠁿 Flag for Radoviš (MK-64) 🏴󠁮󠁺󠁷󠁧󠁮󠁿 Flag for Wellington (NZ-WGN) 👨🏽‍👨🏽‍👶🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁩󠁲󠀱󠀶󠁿 Flag for Kurdistan (IR-16) 👨🏽‍❤️‍💋‍👨🏿 Kiss - Man: Medium Skin Tone, Man: Dark Skin Tone 󠁳 Tag Latin Small Letter S 👩‍👶‍👶 Family: Woman, Baby, Baby 🏴󠁡󠁦󠁤󠁡󠁹󠁿 Flag for Daykundi (AF-DAY) 👨🏻‍❤️‍💋‍👨🏾 Kiss - Man: Light Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁡󠁦󠁦󠁲󠁡󠁿 Flag for Farah (AF-FRA) 󠁱 Tag Latin Small Letter Q 🏴󠁧󠁴󠁧󠁵󠁿 Flag for Guatemala (GT-GU) 🏴󠁣󠁨󠁴󠁧󠁿 Flag for Thurgau (CH-TG) 🏴󠁲󠁵󠁣󠁥󠁿 Flag for Chechen (RU-CE) 󠀵 Tag Digit Five 🏴󠁡󠁦󠁧󠁨󠁯󠁿 Flag for Ghōr (AF-GHO) 🏴󠁡󠁴󠀹󠁿 Flag for Vienna (AT-9) 🏴󠁡󠁦󠁧󠁨󠁡󠁿 Flag for Ghazni (AF-GHA) 󠁵 Tag Latin Small Letter U 🏴󠁢󠁷󠁧󠁡󠁿 Flag for Gaborone (BW-GA) 󠁹 Tag Latin 
Small Letter Y 󠁿 Cancel Tag 󠁷 Tag Latin Small Letter W 👩🏽‍❤️‍👩🏿 Couple With Heart - Woman: Medium Skin Tone, Woman: Dark Skin Tone 🏴󠁣󠁯󠁡󠁭󠁡󠁿 Flag for Amazonas (CO-AMA) 󠁮 Tag Latin Small Letter N 👩‍❤️‍💋‍👩🏽 Kiss - Woman, Woman: Medium Skin Tone 👨‍👶 Family: Man, Baby 🏴󠁡󠁴󠀱󠁿 Flag for Burgenland (AT-1) 🏴󠁡󠁦󠁨󠁥󠁬󠁿 Flag for Helmand (AF-HEL) 󠀶 Tag Digit Six 🏴󠁡󠁦󠁪󠁯󠁷󠁿 Flag for Jowzjan (AF-JOW) 🧕‍♀️ Woman With Headscarf 󠁢 Tag Latin Small Letter B 󠀰 Tag Digit Zero 🏴󠁡󠁦󠁨󠁥󠁲󠁿 Flag for Herat (AF-HER) 🏴󠁧󠁤󠀰󠀵󠁿 Flag for Saint Mark (GD-05) 3 Digit Three 󠁧 Tag Latin Small Letter G 🕴🏾‍♀️ Woman in Business Suit Levitating: Medium-Dark Skin Tone 👩🏽‍❤️‍💋‍👨🏽 Kiss - Woman: Medium Skin Tone, Man: Medium Skin Tone 🏴󠁵󠁳󠁡󠁫󠁿 Flag for Alaska (US-AK) 󠁲 Tag Latin Small Letter R 🏴󠁴󠁬󠁬󠁡󠁿 Flag for Lautém (TL-LA) 🏴󠁡󠁦󠁫󠁡󠁢󠁿 Flag for Kabul (AF-KAB) 👨‍❤️‍💋‍👨🏿 Kiss - Man, Man: Dark Skin Tone 🧕‍♂️ Man With Headscarf 󠁶 Tag Latin Small Letter V 󠁤 Tag Latin Small Letter D 🏴󠁡󠁦󠁫󠁡󠁮󠁿 Flag for Kandahar (AF-KAN) 🏴󠁡󠁦󠁫󠁡󠁰󠁿 Flag for Kapisa (AF-KAP) 🏴󠁭󠁣󠁳󠁲󠁿 Flag for Saint Roman (MC-SR) 🏴󠁥󠁥󠀳󠀹󠁿 Flag for Hiiu (EE-39) 󠁭 Tag Latin Small Letter M 🏴󠁡󠁦󠁫󠁨󠁯󠁿 Flag for Khost (AF-KHO) 🧕🏻‍♂️ Man With Headscarf: Light Skin Tone 🏴󠁡󠁦󠁫󠁤󠁺󠁿 Flag for Kunduz (AF-KDZ) 👩🏿‍❤️‍👨 Couple With Heart - Woman: Dark Skin Tone, Man 🏴󠁵󠁳󠁳󠁤󠁿 Flag for South Dakota (US-SD) 🏴󠁡󠁦󠁢󠁤󠁧󠁿 Flag for Badghis (AF-BDG) 🏴󠁩󠁳󠀸󠁿 Flag for Southern (IS-8) 🏴󠁡󠁦󠁫󠁮󠁲󠁿 Flag for Kunar (AF-KNR) 👨‍👨‍👶‍👶 Family: Man, Man, Baby, Baby 🏴󠁪󠁰󠀱󠀳󠁿 Flag for Tokyo (JP-13) 🏴󠁡󠁦󠁬󠁡󠁧󠁿 Flag for Laghman (AF-LAG) 🧕🏽‍♂️ Man With Headscarf: Medium Skin Tone 🏴󠁡󠁦󠁬󠁯󠁧󠁿 Flag for Logar (AF-LOG) 5 Digit Five 󠁣 Tag Latin Small Letter C 🏴󠁡󠁦󠁦󠁹󠁢󠁿 Flag for Faryab (AF-FYB) 󠁰 Tag Latin Small Letter P 🏴󠁡󠁦󠁮󠁡󠁮󠁿 Flag for Nangarhar (AF-NAN) 󠀹 Tag Digit Nine 🏴󠁥󠁳󠁮󠁣󠁿 Flag for Navarra Chartered Community (ES-NC) 👩🏼‍👦🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁭󠁸󠁮󠁡󠁹󠁿 Flag for Nayarit (MX-NAY) 🏴󠁢󠁲󠁰󠁥󠁿 Flag for Pernambuco (BR-PE) 🏴󠁩󠁴󠀷󠀲󠁿 Flag for Campania (IT-72) 🧕🏾‍♂️ Man With Headscarf: 
Medium-Dark Skin Tone 👩🏽‍❤️‍💋‍👩🏾 Kiss - Woman: Medium Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁡󠁦󠁮󠁵󠁲󠁿 Flag for Nuristan (AF-NUR) 👨‍👨‍👧‍👶 Family: Man, Man, Girl, Baby 🏴󠁰󠁧󠁷󠁢󠁫󠁿 Flag for West New Britain (PG-WBK) 👨🏼‍👩🏼‍👧🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁧󠁹󠁵󠁤󠁿 Flag for Upper Demerara-Berbice (GY-UD) 👨‍❤️‍💋‍👩 Kiss - Man, Woman 🏴󠁥󠁴󠁡󠁦󠁿 Flag for Afar (ET-AF) 🏴󠁡󠁦󠁰󠁡󠁲󠁿 Flag for Parwan (AF-PAR) 🏴󠁡󠁦󠁮󠁩󠁭󠁿 Flag for Nimruz (AF-NIM) 🏴󠁨󠁲󠀰󠀴󠁿 Flag for Karlovac (HR-04) 🏴󠁡󠁦󠁰󠁩󠁡󠁿 Flag for Paktia (AF-PIA) 🧕🏿‍♂️ Man With Headscarf: Dark Skin Tone 🧕🏼‍♂️ Man With Headscarf: Medium-Light Skin Tone 🏴󠁭󠁸󠁢󠁣󠁮󠁿 Flag for Baja California (MX-BCN) 🏴󠁡󠁦󠁰󠁫󠁡󠁿 Flag for Paktika (AF-PKA) 🏴󠁫󠁩󠁰󠁿 Flag for Phoenix Islands (KI-P) 󠁯 Tag Latin Small Letter O 🏴󠁡󠁦󠁰󠁡󠁮󠁿 Flag for Panjshir (AF-PAN) 🏴󠁣󠁨󠁴󠁩󠁿 Flag for Ticino (CH-TI) 🏴󠁳󠁩󠀱󠀹󠀲󠁿 Flag for Žirovnica (SI-192) 🏴󠁳󠁥󠁮󠁿 Flag for Halland (SE-N) 󠁪 Tag Latin Small Letter J 👩🏽‍❤️‍💋‍👩🏻 Kiss - Woman: Medium Skin Tone, Woman: Light Skin Tone 👨🏾‍👨🏾‍👶🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👶🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁳󠁳󠁢󠁮󠁿 Flag for Northern Bahr el Ghazal (SS-BN) 👨🏽‍❤️‍💋‍👩 Kiss - Man: Medium Skin Tone, Woman 🏴󠁣󠁦󠁢󠁫󠁿 Flag for Basse-Kotto (CF-BK) 👨‍❤️‍👨🏻 Couple With Heart - Man, Man: Light Skin Tone 👨🏽‍❤️‍👨 Couple With Heart - Man: Medium Skin Tone, Man 🏴󠁬󠁹󠁢󠁵󠁿 Flag for Butnan (LY-BU) 👩‍👶 Family: Woman, Baby 🏴󠁬󠁫󠀹󠁿 Flag for Sabaragamuwa (LK-9) 🏴󠁡󠁦󠁳󠁡󠁭󠁿 Flag for Samangan (AF-SAM) 🏴󠁴󠁶󠁮󠁫󠁬󠁿 Flag for Nukulaelae (TV-NKL) 🏴󠁡󠁥󠁲󠁫󠁿 Flag for Ras al-Khaimah (AE-RK) 🏴󠁥󠁳󠁣󠁥󠁿 Flag for Ceuta (ES-CE) 🏴󠁡󠁥󠁤󠁵󠁿 Flag for Dubai (AE-DU) 👨🏻‍👨🏻‍👶🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 🏴󠁪󠁰󠀴󠀷󠁿 Flag for Okinawa (JP-47) 🏴󠁡󠁦󠁳󠁡󠁲󠁿 Flag for Sar-e Pol (AF-SAR) 👩🏼‍👩🏼‍👦🏼‍👧🏼 Family - Woman: Medium-Light 
Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 󠁬 Tag Latin Small Letter L 🏴󠁡󠁦󠁵󠁲󠁵󠁿 Flag for Urozgan (AF-URU) 9 Digit Nine 👩🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨‍❤️‍💋‍👨🏽 Kiss - Man, Man: Medium Skin Tone 🏴󠁤󠁭󠀰󠀶󠁿 Flag for Saint Joseph (DM-06) 🏴󠁡󠁧󠀰󠀴󠁿 Flag for Saint John (AG-04) 🏴󠁣󠁯󠁶󠁩󠁤󠁿 Flag for Vichada (CO-VID) 🏴󠁰󠁷󠀲󠀱󠀸󠁿 Flag for Ngarchelong (PW-218) 🏴󠁲󠁵󠁡󠁲󠁫󠁿 Flag for Arkhangelsk (RU-ARK) 🏴󠁡󠁦󠁺󠁡󠁢󠁿 Flag for Zabul (AF-ZAB) 🏴󠁡󠁧󠀰󠀳󠁿 Flag for Saint George (AG-03) 🏴󠁩󠁴󠀲󠀵󠁿 Flag for Lombardy (IT-25) 👨🏻‍❤️‍💋‍👨🏻 Kiss - Man: Light Skin Tone, Man: Light Skin Tone 🏴󠁣󠁺󠀵󠀳󠁿 Flag for Pardubický kraj (CZ-53) 🏴󠁡󠁧󠀰󠀶󠁿 Flag for Saint Paul (AG-06) 🏴󠁶󠁮󠀵󠀱󠁿 Flag for Trà Vinh (VN-51) 👩‍👨‍👶‍👧 Family: Woman, Man, Baby, Girl 🏴󠁫󠁲󠀴󠀸󠁿 Flag for South Gyeongsang (KR-48) 🏴󠁡󠁧󠀰󠀵󠁿 Flag for Saint Mary (AG-05) 🏴󠁧󠁲󠁫󠁿 Flag for North Aegean (GR-K) 👩‍👩‍👶‍👧 Family: Woman, Woman, Baby, Girl 🏴󠁥󠁣󠁺󠁿 Flag for Zamora-Chinchipe (EC-Z) 🏴󠁮󠁩󠁭󠁳󠁿 Flag for Masaya (NI-MS) 🏴󠁫󠁩󠁧󠁿 Flag for Gilbert Islands (KI-G) 🏴󠁭󠁸󠁣󠁨󠁨󠁿 Flag for Chihuahua (MX-CHH) 👨🏼‍👨🏼‍👶🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👨🏽‍👶🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏽‍👧🏽‍👧🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩‍👨‍👶‍👶 Family: Woman, Man, Baby, Baby 🏴󠁡󠁧󠀱󠀱󠁿 Flag for Redonda (AG-11) 👩‍👩‍👶 Family: Woman, Woman, Baby 👨‍❤️‍💋‍👩🏻 Kiss - Man, Woman: Light Skin Tone 👨‍❤️‍💋‍👨🏾 Kiss - Man, Man: Medium-Dark Skin Tone 🏴󠁡󠁬󠀰󠀱󠁿 Flag for Berat County (AL-01) 󠁡 Tag Latin Small Letter A 🏴󠁡󠁧󠀱󠀰󠁿 Flag for Barbuda (AG-10) 🏴󠁣󠁯󠁳󠁡󠁰󠁿 Flag for San Andrés & Providencia (CO-SAP) 🏴󠁡󠁬󠀰󠀳󠁿 Flag for Elbasan County (AL-03) 👨🏾‍👨🏾‍👶🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👦🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin 
Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁩󠁮󠁫󠁡󠁿 Flag for Karnataka (IN-KA) 🏴󠁡󠁬󠀰󠀵󠁿 Flag for Gjirokastër County (AL-05) 🏴󠁪󠁰󠀰󠀱󠁿 Flag for Hokkaidō (JP-01) 👩🏾‍👨🏾‍👶🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁵󠁧󠁣󠁿 Flag for Central (UG-C) 👨🏼‍❤️‍💋‍👨 Kiss - Man: Medium-Light Skin Tone, Man 🏴󠁡󠁬󠀰󠀲󠁿 Flag for Durrës County (AL-02) 🏴󠁡󠁬󠀰󠀴󠁿 Flag for Fier County (AL-04) 🏴󠁡󠁬󠀰󠀶󠁿 Flag for Korçë County (AL-06) 🏴󠁰󠁹󠀱󠀶󠁿 Flag for Alto Paraguay (PY-16) 🏴󠁡󠁬󠀰󠀷󠁿 Flag for Kukës County (AL-07) 👨🏿‍❤️‍💋‍👨 Kiss - Man: Dark Skin Tone, Man 🏴󠁧󠁹󠁵󠁴󠁿 Flag for Upper Takutu-Upper Essequibo (GY-UT) 👨🏾‍👶🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👶🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 👨🏻‍👨🏻‍👶🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 🏴󠁡󠁬󠀰󠀹󠁿 Flag for Dibër County (AL-09) 🏴󠁡󠁬󠀰󠀸󠁿 Flag for Lezhë County (AL-08) 👨🏼‍👨🏼‍👶🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁡󠁬󠀱󠀱󠁿 Flag for Tirana County (AL-11) 🏴󠁡󠁤󠀰󠀶󠁿 Flag for Sant Julià de Lòria (AD-06) 🏴󠁢󠁲󠁢󠁡󠁿 Flag for Bahia (BR-BA) 🏴󠁡󠁬󠀱󠀰󠁿 Flag for Shkodër County (AL-10) 👩‍❤️‍💋‍👨🏿 Kiss - Woman, Man: Dark Skin Tone 👨🏽‍👨🏽‍👶🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👨🏾‍👶🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩‍❤️‍💋‍👨🏽 Kiss - Woman, Man: Medium Skin Tone 🏴󠁡󠁬󠀱󠀲󠁿 Flag for Vlorë County (AL-12) 🏴󠁴󠁨󠀲󠀳󠁿 Flag for Trat (TH-23) 🏴󠁡󠁭󠁧󠁲󠁿 Flag for Gegharkunik (AM-GR) 👨🏿‍👨🏿‍👶🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁡󠁭󠁡󠁧󠁿 Flag for Aragatsotn (AM-AG) 🏴󠁡󠁭󠁡󠁲󠁿 Flag for Ararat (AM-AR) 🏴󠁡󠁭󠁥󠁲󠁿 Flag for Yerevan (AM-ER) 🏴󠁡󠁭󠁫󠁴󠁿 Flag for 
Kotayk (AM-KT) 🏴󠁦󠁲󠁣󠁯󠁲󠁿 Flag for Corse (FR-COR) 🏴󠁡󠁭󠁡󠁶󠁿 Flag for Armavir (AM-AV) 👩‍❤️‍💋‍👩🏿 Kiss - Woman, Woman: Dark Skin Tone 🏴󠁢󠁲󠁭󠁧󠁿 Flag for Minas Gerais (BR-MG) 🏴󠁣󠁧󠀱󠀶󠁿 Flag for Pointe-Noire (CG-16) 🏴󠁡󠁭󠁬󠁯󠁿 Flag for Lori (AM-LO) 🏴󠁤󠁺󠀲󠀱󠁿 Flag for Skikda (DZ-21) 🏴󠁡󠁭󠁳󠁨󠁿 Flag for Shirak (AM-SH) 👩‍❤️‍💋‍👩🏾 Kiss - Woman, Woman: Medium-Dark Skin Tone 🏴󠁡󠁤󠀰󠀷󠁿 Flag for Andorra la Vella (AD-07) 🏴󠁲󠁵󠁡󠁬󠁴󠁿 Flag for Altai Krai (RU-ALT) 🏴󠁳󠁩󠀱󠀶󠀷󠁿 Flag for Lovrenc na Pohorju (SI-167) 👩‍❤️‍💋‍👩🏼 Kiss - Woman, Woman: Medium-Light Skin Tone 👨🏿‍❤️‍💋‍👩🏻 Kiss - Man: Dark Skin Tone, Woman: Light Skin Tone 🏴󠁬󠁴󠁰󠁮󠁿 Flag for Panevėžys County (LT-PN) 🏴󠁤󠁯󠀳󠀵󠁿 Flag for Cibao Norte (DO-35) 🏴󠁮󠁯󠀱󠀰󠁿 Flag for Vest-Agder (NO-10) 👨‍❤️‍💋‍👩🏿 Kiss - Man, Woman: Dark Skin Tone 🏴󠁡󠁭󠁶󠁤󠁿 Flag for Vayots Dzor (AM-VD) 👩🏻‍❤️‍💋‍👩🏻 Kiss - Woman: Light Skin Tone, Woman: Light Skin Tone 🏴󠁵󠁳󠁶󠁴󠁿 Flag for Vermont (US-VT) 👨🏽‍❤️‍💋‍👨 Kiss - Man: Medium Skin Tone, Man 🏴󠁡󠁯󠁢󠁧󠁯󠁿 Flag for Bengo (AO-BGO) 👩🏻‍❤️‍💋‍👩 Kiss - Woman: Light Skin Tone, Woman 🏴󠁣󠁯󠁭󠁥󠁴󠁿 Flag for Meta (CO-MET) 🏴󠁮󠁬󠁢󠁱󠀲󠁿 Flag for Saba (NL-BQ2) 👩🏽‍❤️‍💋‍👩🏼 Kiss - Woman: Medium Skin Tone, Woman: Medium-Light Skin Tone 👨🏽‍👩🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁡󠁯󠁢󠁧󠁵󠁿 Flag for Benguela (AO-BGU) 🏴󠁣󠁯󠁳󠁵󠁣󠁿 Flag for Sucre (CO-SUC) 🏴󠁡󠁯󠁣󠁣󠁵󠁿 Flag for Cuando Cubango (AO-CCU) 🏴󠁰󠁥󠁭󠁤󠁤󠁿 Flag for Madre de Dios (PE-MDD) 🏴󠁣󠁨󠁶󠁤󠁿 Flag for Vaud (CH-VD) 🏴󠁡󠁯󠁢󠁩󠁥󠁿 Flag for Bié (AO-BIE) 🏴󠁡󠁯󠁣󠁡󠁢󠁿 Flag for Cabinda (AO-CAB) 🏴󠁡󠁯󠁨󠁵󠁩󠁿 Flag for Huíla (AO-HUI) 🏴󠁡󠁯󠁣󠁵󠁳󠁿 Flag for Cuanza Sul (AO-CUS) 👨‍❤️‍💋‍👩🏽 Kiss - Man, Woman: Medium Skin Tone 👩‍👩‍👦‍👶 Family: Woman, Woman, Boy, Baby 🏴󠁡󠁯󠁨󠁵󠁡󠁿 Flag for Huambo (AO-HUA) 👨🏼‍❤️‍👩🏾 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁣󠁹󠀰󠀶󠁿 Flag for Kyrenia (CY-06) 👩🏼‍❤️‍💋‍👨🏻 Kiss - Woman: Medium-Light Skin Tone, Man: Light Skin Tone 🏴󠁡󠁥󠁵󠁱󠁿 Flag for Umm al-Quwain (AE-UQ) 🏴󠁡󠁯󠁬󠁳󠁵󠁿 Flag for Lunda Sul (AO-LSU) 🏴󠁬󠁲󠁣󠁭󠁿 Flag for Grand Cape Mount (LR-CM) 🏴󠁡󠁯󠁬󠁮󠁯󠁿 Flag for Lunda Norte (AO-LNO) 👩🏽‍❤️‍👨🏿 
Couple With Heart - Woman: Medium Skin Tone, Man: Dark Skin Tone 👨🏾‍❤️‍👩🏾 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁡󠁯󠁣󠁮󠁯󠁿 Flag for Cuanza Norte (AO-CNO) 🏴󠁡󠁯󠁭󠁡󠁬󠁿 Flag for Malanje (AO-MAL) 👩🏼‍❤️‍💋‍👩 Kiss - Woman: Medium-Light Skin Tone, Woman 👨🏼‍👩🏼‍👦🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁡󠁯󠁭󠁯󠁸󠁿 Flag for Moxico (AO-MOX) 🏴󠁡󠁯󠁮󠁡󠁭󠁿 Flag for Namibe (AO-NAM) 👨🏾‍👩🏾‍👦🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 󠁫 Tag Latin Small Letter K 🕴🏼‍♀️ Woman in Business Suit Levitating: Medium-Light Skin Tone 🏴󠁡󠁲󠁡󠁿 Flag for Salta (AR-A) 👨🏾‍👩🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁣󠁤󠁬󠁵󠁿 Flag for Lualaba (CD-LU) 🏴󠁡󠁲󠁢󠁿 Flag for Buenos Aires Province (AR-B) 👨🏿‍👩🏿‍👦🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁡󠁲󠁤󠁿 Flag for San Luis (AR-D) 🏴󠁡󠁯󠁺󠁡󠁩󠁿 Flag for Zaire (AO-ZAI) 🏴󠁴󠁲󠀰󠀳󠁿 Flag for Afyonkarahisar (TR-03) 0 Digit Zero 🏴󠁶󠁮󠀲󠀵󠁿 Flag for Quảng Trị (VN-25) 🕴🏿‍♀️ Woman in Business Suit Levitating: Dark Skin Tone 🏴󠁡󠁯󠁵󠁩󠁧󠁿 Flag for Uíge (AO-UIG) 👩🏾‍👧🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁵󠁡󠀱󠀸󠁿 Flag for Zhytomyrshchyna (UA-18) 👨🏾‍❤️‍💋‍👨🏽 Kiss - Man: Medium-Dark Skin Tone, Man: Medium Skin Tone 🏴󠁣󠁯󠁣󠁥󠁳󠁿 Flag for Cesar (CO-CES) 🏴󠁡󠁭󠁳󠁵󠁿 Flag for Syunik (AM-SU) 🏴󠁡󠁲󠁥󠁿 Flag for Entre Ríos (AR-E) 👨🏿‍❤️‍💋‍👩 Kiss - Man: Dark Skin Tone, Woman 🏴󠁡󠁲󠁦󠁿 Flag for La Rioja (AR-F) 🏴󠁫󠁺󠁶󠁯󠁳󠁿 Flag for East Kazakhstan (KZ-VOS) 🏴󠁡󠁦󠁷󠁡󠁲󠁿 Flag for Maidan Wardak (AF-WAR) 🏴󠁡󠁲󠁪󠁿 Flag for San Juan (AR-J) 👩🏾‍👩🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁡󠁯󠁬󠁵󠁡󠁿 Flag for Luanda (AO-LUA) 🏴󠁡󠁲󠁬󠁿 Flag for La Pampa (AR-L) 👩🏼‍❤️‍💋‍👩🏽 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium Skin Tone 
👨🏼‍👩🏼‍👦🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏼‍👩🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁡󠁲󠁫󠁿 Flag for Catamarca (AR-K) 🏴󠁡󠁲󠁲󠁿 Flag for Río Negro (AR-R) 🏴󠁡󠁲󠁨󠁿 Flag for Chaco (AR-H) 🏴󠁡󠁲󠁰󠁿 Flag for Formosa (AR-P) 🏴󠁡󠁲󠁭󠁿 Flag for Mendoza (AR-M) 🏴󠁡󠁲󠁮󠁿 Flag for Misiones (AR-N) 🏴󠁡󠁲󠁱󠁿 Flag for Neuquén (AR-Q) 👨🏽‍👩🏽‍👦🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁡󠁲󠁴󠁿 Flag for Tucumán (AR-T) 🏴󠁡󠁲󠁳󠁿 Flag for Santa Fe (AR-S) 🏴󠁡󠁲󠁷󠁿 Flag for Corrientes (AR-W) 🏴󠁡󠁲󠁹󠁿 Flag for Jujuy (AR-Y) 🏴󠁡󠁲󠁶󠁿 Flag for Tierra del Fuego (AR-V) 🏴󠁡󠁲󠁵󠁿 Flag for Chubut (AR-U) 🏴󠁡󠁲󠁸󠁿 Flag for Córdoba (AR-X) 🏴󠁡󠁲󠁺󠁿 Flag for Santa Cruz (AR-Z) 🏴󠁡󠁲󠁧󠁿 Flag for Santiago del Estero (AR-G) 🏴󠁡󠁴󠀲󠁿 Flag for Carinthia (AT-2) 🏴󠁣󠁨󠁢󠁬󠁿 Flag for Basel-Landschaft (CH-BL) 👩🏿‍👧🏿‍👧🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👨🏻‍👩🏻‍👦🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👩🏻‍👧🏻‍👶🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👨‍👨‍👦‍👧 Family: Man, Man, Boy, Girl 🏴󠁡󠁴󠀳󠁿 Flag for Lower Austria (AT-3) 👩‍👶‍👦 Family: Woman, Baby, Boy 🏴󠁭󠁲󠀱󠀳󠁿 Flag for Nouakchott Ouest (MR-13) 👨🏼‍👩🏼‍👦🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁣󠁦󠁭󠁢󠁿 Flag for Mbomou (CF-MB) 🏴󠁡󠁴󠀶󠁿 Flag for Styria (AT-6) 🏴󠁰󠁨󠀰󠀱󠁿 Flag for Ilocos (PH-01) 🏴󠁡󠁴󠀷󠁿 Flag for Tyrol (AT-7) 🏴󠁣󠁮󠀵󠀲󠁿 Flag for Guizhou (CN-52) 🏴󠁬󠁡󠁸󠁳󠁿 Flag for Xaisomboun (LA-XS) 🏴󠁡󠁴󠀸󠁿 Flag for Vorarlberg (AT-8) 👨🏼‍👨🏼‍👦🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁡󠁴󠀵󠁿 Flag for Salzburg (AT-5) 👨🏿‍👩🏿‍👦🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 
👩‍👩‍👶‍👶 Family: Woman, Woman, Baby, Baby 👩‍👨‍👧‍👦 Family: Woman, Man, Girl, Boy 👩‍👨‍👧 Family: Woman, Man, Girl 👩‍👦‍👶 Family: Woman, Boy, Baby 🏴󠁡󠁵󠁮󠁳󠁷󠁿 Flag for New South Wales (AU-NSW) 👩‍👨‍👧‍👶 Family: Woman, Man, Girl, Baby 👩🏽‍👧🏽‍👶🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁡󠁵󠁮󠁴󠁿 Flag for Northern Territory (AU-NT) 👩🏿‍👧🏿‍👦🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁡󠁵󠁱󠁬󠁤󠁿 Flag for Queensland (AU-QLD) 2 Digit Two 👩‍👨‍👧‍👧 Family: Woman, Man, Girl, Girl 👩🏼‍👧🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁡󠁴󠀴󠁿 Flag for Upper Austria (AT-4) 🏴󠁧󠁲󠁡󠁿 Flag for East Macedonia and Thrace (GR-A) 👨🏽‍👩🏽‍👦🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👩🏾‍👦🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨‍👶‍👧 Family: Man, Baby, Girl 👨🏻‍👩🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone 👨🏿‍👩🏿‍👦🏿‍👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 👩‍👨‍👶 Family: Woman, Man, Baby 🏴󠁵󠁳󠁮󠁥󠁿 Flag for Nebraska (US-NE) 🏴󠁡󠁺󠁡󠁧󠁡󠁿 Flag for Agstafa (AZ-AGA) 🏴󠁡󠁦󠁴󠁡󠁫󠁿 Flag for Takhar (AF-TAK) 🏴󠁡󠁵󠁷󠁡󠁿 Flag for Western Australia (AU-WA) 🏴󠁡󠁺󠁡󠁧󠁣󠁿 Flag for Aghjabadi (AZ-AGC) 🏴󠁡󠁺󠁡󠁳󠁴󠁿 Flag for Astara (AZ-AST) 🏴󠁡󠁺󠁢󠁡󠁬󠁿 Flag for Balakan (AZ-BAL) 👩‍❤️‍💋‍👨🏼 Kiss - Woman, Man: Medium-Light Skin Tone 🏴󠁵󠁳󠁣󠁡󠁿 Flag for California (US-CA) 🏴󠁡󠁺󠁡󠁧󠁳󠁿 Flag for Agdash (AZ-AGS) 🏴󠁡󠁺󠁢󠁡󠁿 Flag for Baku (AZ-BA) 👨🏻‍❤️‍💋‍👩🏿 Kiss - Man: Light Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁵󠁶󠁩󠁣󠁿 Flag for Victoria (AU-VIC) 🏴󠁡󠁺󠁡󠁧󠁭󠁿 Flag for Agdam (AZ-AGM) 👨🏻‍👧🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone 🏴󠁡󠁺󠁢󠁡󠁲󠁿 Flag for Barda (AZ-BAR) 👨🏽‍👩🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👧🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin 
Tone, Baby: Medium-Dark Skin Tone 🏴󠁡󠁺󠁡󠁧󠁵󠁿 Flag for Agsu (AZ-AGU) 🏴󠁣󠁤󠁴󠁡󠁿 Flag for Tanganyika (CD-TA) 👩🏻‍❤️‍👨🏼 Couple With Heart - Woman: Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁡󠁺󠁢󠁩󠁬󠁿 Flag for Bilasuvar (AZ-BIL) 🏴󠁡󠁺󠁣󠁡󠁬󠁿 Flag for Jalilabad (AZ-CAL) 🏴󠁡󠁺󠁣󠁡󠁢󠁿 Flag for Jabrayil (AZ-CAB) 🏴󠁡󠁺󠁢󠁥󠁹󠁿 Flag for Beylagan (AZ-BEY) 🏴󠁳󠁩󠀰󠀸󠀵󠁿 Flag for Novo Mesto (SI-085) 🏴󠁣󠁧󠀹󠁿 Flag for Niari (CG-9) 🏴󠁡󠁺󠁤󠁡󠁳󠁿 Flag for Dashkasan (AZ-DAS) 🏴󠁡󠁺󠁦󠁵󠁺󠁿 Flag for Fizuli (AZ-FUZ) 👩🏿‍❤️‍💋‍👨🏽 Kiss - Woman: Dark Skin Tone, Man: Medium Skin Tone 👨🏿‍❤️‍👨🏾 Couple With Heart - Man: Dark Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁡󠁺󠁧󠁯󠁹󠁿 Flag for Goychay (AZ-GOY) 🏴󠁡󠁺󠁧󠁯󠁲󠁿 Flag for Goranboy (AZ-GOR) 🏴󠁡󠁺󠁧󠁡󠁿 Flag for Ganja (AZ-GA) 🏴󠁱󠁡󠁵󠁳󠁿 Flag for Umm Salal (QA-US) 🏴󠁦󠁪󠁥󠁿 Flag for Eastern (FJ-E) 🏴󠁡󠁺󠁧󠁹󠁧󠁿 Flag for Goygol (AZ-GYG) 🏴󠁡󠁺󠁨󠁡󠁣󠁿 Flag for Hajigabul (AZ-HAC) 👩🏿‍❤️‍💋‍👩 Kiss - Woman: Dark Skin Tone, Woman 🏴󠁬󠁶󠀰󠀷󠀷󠁿 Flag for Rēzekne Municipality (LV-077) 🏴󠁡󠁵󠁡󠁣󠁴󠁿 Flag for Australian Capital Territory (AU-ACT) 👨🏽‍❤️‍💋‍👩🏾 Kiss - Man: Medium Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁮󠁧󠁦󠁣󠁿 Flag for Federal Capital Territory (NG-FC) 🏴󠁲󠁵󠁢󠁲󠁹󠁿 Flag for Bryansk (RU-BRY) 🏴󠁡󠁭󠁴󠁶󠁿 Flag for Tavush (AM-TV) 🏴󠁥󠁣󠁳󠁤󠁿 Flag for Santo Domingo de los Tsáchilas (EC-SD) 👩🏼‍❤️‍👩 Couple With Heart - Woman: Medium-Light Skin Tone, Woman 🏴󠁡󠁺󠁩󠁭󠁩󠁿 Flag for Imishli (AZ-IMI) 🏴󠁴󠁭󠁳󠁿 Flag for Aşgabat (TM-S) 👨‍❤️‍👩🏾 Couple With Heart - Man, Woman: Medium-Dark Skin Tone 🏴󠁬󠁡󠁸󠁥󠁿 Flag for Sekong (LA-XE) 🏴󠁲󠁯󠁧󠁪󠁿 Flag for Gorj (RO-GJ) 👨🏻‍❤️‍👨 Couple With Heart - Man: Light Skin Tone, Man 🏴󠁡󠁺󠁫󠁵󠁲󠁿 Flag for Kurdamir (AZ-KUR) 👩🏻‍👨🏻‍👦🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁡󠁺󠁫󠁡󠁬󠁿 Flag for Kalbajar (AZ-KAL) 🏴󠁡󠁺󠁧󠁡󠁤󠁿 Flag for Gadabay (AZ-GAD) 🏴󠁡󠁺󠁬󠁡󠁣󠁿 Flag for Lachin (AZ-LAC) 🏴󠁡󠁺󠁬󠁡󠁿 Flag for Lankaran (AZ-LA) 🏴󠁶󠁮󠁳󠁧󠁿 Flag for Ho Chi Minh City (VN-SG) 🏴󠁡󠁺󠁬󠁥󠁲󠁿 Flag for Lerik (AZ-LER) 🏴󠁡󠁺󠁭󠁩󠁿 Flag for Mingachevir (AZ-MI) 👩🏾‍👨🏾‍👧🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: 
Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁡󠁺󠁮󠁡󠁿 Flag for Naftalan (AZ-NA) 🏴󠁡󠁺󠁭󠁡󠁳󠁿 Flag for Masally (AZ-MAS) 👨‍❤️‍👩 Couple With Heart - Man, Woman 🏴󠁡󠁺󠁬󠁡󠁮󠁿 Flag for Lankaran District (AZ-LAN) 👩🏼‍👨🏼‍👧🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍❤️‍💋‍👨🏾 Kiss - Woman: Medium Skin Tone, Man: Medium-Dark Skin Tone 👩🏿‍👧🏿‍👶🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁡󠁺󠁮󠁥󠁦󠁿 Flag for Neftchala (AZ-NEF) 🏴󠁡󠁺󠁮󠁸󠁿 Flag for Nakhchivan AR (AZ-NX) 🏴󠁳󠁩󠀰󠀱󠀱󠁿 Flag for Celje (SI-011) 🏴󠁬󠁴󠀳󠀲󠁿 Flag for Panevėžio Municipality (LT-32) 👩🏿‍❤️‍💋‍👩🏽 Kiss - Woman: Dark Skin Tone, Woman: Medium Skin Tone 👨🏻‍❤️‍👩🏿 Couple With Heart - Man: Light Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁺󠁩󠁳󠁭󠁿 Flag for Ismailli (AZ-ISM) 󠁨 Tag Latin Small Letter H 👩🏾‍❤️‍👨🏻 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Light Skin Tone 👩🏻‍👶🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone 🏴󠁣󠁦󠁮󠁭󠁿 Flag for Nana-Mambéré (CF-NM) 🏴󠁡󠁺󠁱󠁯󠁢󠁿 Flag for Gobustan (AZ-QOB) 👩🏿‍❤️‍💋‍👨🏻 Kiss - Woman: Dark Skin Tone, Man: Light Skin Tone 👩🏿‍❤️‍💋‍👩🏿 Kiss - Woman: Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁺󠁱󠁢󠁩󠁿 Flag for Qubadli (AZ-QBI) 🏴󠁡󠁺󠁱󠁡󠁺󠁿 Flag for Qazakh (AZ-QAZ) 🏴󠁲󠁯󠁢󠁶󠁿 Flag for Braşov (RO-BV) 👨‍👩‍👧‍👶 Family: Man, Woman, Girl, Baby 🏴󠁡󠁺󠁱󠁢󠁡󠁿 Flag for Quba (AZ-QBA) 🏴󠁡󠁺󠁱󠁡󠁢󠁿 Flag for Qabala (AZ-QAB) 🏴󠁣󠁨󠁵󠁲󠁿 Flag for Uri (CH-UR) 🏴󠁡󠁺󠁯󠁧󠁵󠁿 Flag for Oghuz (AZ-OGU) 🏴󠁡󠁺󠁱󠁡󠁸󠁿 Flag for Qakh (AZ-QAX) 🏴󠁳󠁩󠀲󠀰󠀶󠁿 Flag for Šmarješke Toplice (SI-206) 👨🏾‍❤️‍💋‍👩🏿 Kiss - Man: Medium-Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁧󠀰󠀷󠁿 Flag for Saint Peter (AG-07) 👨🏻‍👩🏻‍👧🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 🏴󠁬󠁲󠁭󠁹󠁿 Flag for Maryland (LR-MY) 🏴󠁡󠁵󠁳󠁡󠁿 Flag for South Australia (AU-SA) 🏴󠁡󠁺󠁱󠁵󠁳󠁿 Flag for Qusar (AZ-QUS) 🏴󠁡󠁺󠁳󠁡󠁢󠁿 Flag for Sabirabad (AZ-SAB) 👨‍❤️‍👩🏽 Couple With Heart - Man, Woman: Medium Skin Tone 👨‍❤️‍👩🏼 Couple With Heart - Man, Woman: Medium-Light Skin Tone 
🏴󠁡󠁺󠁳󠁡󠁴󠁿 Flag for Saatly (AZ-SAT) 🏴󠁡󠁺󠁳󠁢󠁮󠁿 Flag for Shabran (AZ-SBN) 👨🏼‍❤️‍👩🏽 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium Skin Tone 🏴󠁡󠁺󠁳󠁡󠁫󠁿 Flag for Shaki District (AZ-SAK) 🏴󠁣󠁯󠁣󠁡󠁳󠁿 Flag for Casanare (CO-CAS) 👨‍👩‍👶‍👶 Family: Man, Woman, Baby, Baby 🏴󠁡󠁺󠁳󠁲󠁿 Flag for Shirvan (AZ-SR) 🏴󠁡󠁺󠁳󠁵󠁳󠁿 Flag for Shusha (AZ-SUS) 🏴󠁣󠁨󠁶󠁳󠁿 Flag for Valais (CH-VS) 👩🏽‍👶🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone 👩🏻‍❤️‍💋‍👨🏿 Kiss - Woman: Light Skin Tone, Man: Dark Skin Tone 🏴󠁡󠁺󠁳󠁡󠁿 Flag for Shaki (AZ-SA) 🏴󠁦󠁲󠁭󠁱󠁿 Flag for Martinique (FR-MQ) 🏴󠁡󠁺󠁳󠁭󠁿 Flag for Sumqayit (AZ-SM) 🏴󠁡󠁺󠁳󠁩󠁹󠁿 Flag for Siazan (AZ-SIY) 🏴󠁡󠁺󠁳󠁭󠁩󠁿 Flag for Shamakhi (AZ-SMI) 👩🏿‍❤️‍💋‍👨 Kiss - Woman: Dark Skin Tone, Man 🏴󠁡󠁺󠁳󠁭󠁸󠁿 Flag for Samukh (AZ-SMX) 👨🏻‍👩🏻‍👧🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 🏴󠁡󠁺󠁴󠁯󠁶󠁿 Flag for Tovuz (AZ-TOV) 🏴󠁡󠁺󠁸󠁡󠁣󠁿 Flag for Khachmaz (AZ-XAC) 🏴󠁡󠁺󠁵󠁣󠁡󠁿 Flag for Ujar (AZ-UCA) 🏴󠁡󠁺󠁴󠁡󠁲󠁿 Flag for Tartar (AZ-TAR) 👨🏿‍❤️‍💋‍👨🏻 Kiss - Man: Dark Skin Tone, Man: Light Skin Tone 👩🏼‍👧🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👩🏽‍👧🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁡󠁺󠁸󠁩󠁺󠁿 Flag for Khizi (AZ-XIZ) 👨🏽‍❤️‍👨🏼 Couple With Heart - Man: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁡󠁺󠁸󠁣󠁩󠁿 Flag for Khojali (AZ-XCI) 🏴󠁶󠁥󠁹󠁿 Flag for Delta Amacuro (VE-Y) 🏴󠁡󠁺󠁸󠁡󠁿 Flag for Stepanakert (AZ-XA) 🏴󠁡󠁺󠁹󠁡󠁲󠁿 Flag for Yardymli (AZ-YAR) 🏴󠁡󠁺󠁹󠁥󠁶󠁿 Flag for Yevlakh District (AZ-YEV) 🏴󠁡󠁺󠁺󠁡󠁱󠁿 Flag for Zaqatala (AZ-ZAQ) 👩🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁡󠁺󠁹󠁥󠁿 Flag for Yevlakh (AZ-YE) 🏴󠁢󠁡󠁢󠁩󠁨󠁿 Flag for Federation of Bosnia and Herzegovina (BA-BIH) 🏴󠁡󠁺󠁺󠁡󠁲󠁿 Flag for Zardab (AZ-ZAR) 🏴󠁡󠁺󠁳󠁡󠁬󠁿 Flag for Salyan (AZ-SAL) 🏴󠁣󠁨󠁺󠁧󠁿 Flag for Zug (CH-ZG) 👨🏾‍👩🏾‍👧🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨🏿‍👩🏿‍👧🏿‍👶🏿 
Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👩🏿‍👶🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁢󠁡󠁳󠁲󠁰󠁿 Flag for Republika Srpska (BA-SRP) 👨🏽‍❤️‍👩 Couple With Heart - Man: Medium Skin Tone, Woman 👨🏻‍👩🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone 🏴󠁥󠁳󠁡󠁮󠁿 Flag for Andalusia (ES-AN) 👨🏼‍👩🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁢󠁢󠀰󠀴󠁿 Flag for Saint James (BB-04) 👨🏾‍❤️‍👩🏼 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁢󠁢󠀰󠀳󠁿 Flag for Saint George (BB-03) 🏴󠁢󠁢󠀰󠀲󠁿 Flag for Saint Andrew (BB-02) 👨‍👩‍👶‍👦 Family: Man, Woman, Baby, Boy 👨🏽‍👩🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁢󠁢󠀰󠀵󠁿 Flag for Saint John (BB-05) 👨🏾‍👩🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁢󠁢󠀰󠀶󠁿 Flag for Saint Joseph (BB-06) 🏴󠁬󠁫󠀱󠁿 Flag for Western (LK-1) 🏴󠁢󠁹󠁢󠁲󠁿 Flag for Brest (BY-BR) 🏴󠁡󠁺󠁳󠁫󠁲󠁿 Flag for Shamkir (AZ-SKR) 🏴󠁢󠁢󠀰󠀷󠁿 Flag for Saint Lucy (BB-07) 👩🏻‍👶🏻‍👦🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 🏴󠁥󠁳󠁣󠁭󠁿 Flag for Castile-La Mancha (ES-CM) 🏴󠁢󠁢󠀱󠀰󠁿 Flag for Saint Philip (BB-10) 🏴󠁶󠁣󠀰󠀴󠁿 Flag for Saint George (VC-04) 👨🏻‍👩🏻‍👶🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 👩🏻‍👧🏻‍👧🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁤󠁡󠁿 Flag for Barisal (BD-A) 🏴󠁡󠁺󠁺󠁡󠁮󠁿 Flag for Zangilan (AZ-ZAN) 🏴󠁪󠁭󠀰󠀱󠁿 Flag for Kingston (JM-01) 👨🏼‍👩🏼‍👶🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁢󠁤󠁥󠁿 Flag for Rajshahi Division (BD-E) 🏴󠁢󠁤󠁦󠁿 Flag for Rangpur Division (BD-F) 🏴󠁢󠁤󠁣󠁿 Flag for Dhaka Division (BD-C) 🏴󠁢󠁤󠁤󠁿 Flag for Khulna Division (BD-D) 🏴󠁢󠁢󠀰󠀹󠁿 Flag for Saint Peter (BB-09) 🏴󠁳󠁩󠀰󠀵󠀸󠁿 Flag for Lenart (SI-058) 👩🏼‍👶🏼 Family - Woman: 
Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁢󠁦󠀰󠀲󠁿 Flag for Cascades (BF-02) 🏴󠁢󠁤󠁨󠁿 Flag for Mymensingh Division (BD-H) 🏴󠁢󠁥󠁷󠁡󠁬󠁿 Flag for Wallonia (BE-WAL) 🏴󠁭󠁵󠁢󠁲󠁿 Flag for Beau-Bassin Rose-Hill (MU-BR) 🏴󠁢󠁦󠀰󠀴󠁿 Flag for Centre-Est (BF-04) 🏴󠁣󠁮󠀹󠀱󠁿 Flag for Hong Kong SAR China (CN-91) 🏴󠁢󠁦󠀰󠀱󠁿 Flag for Boucle du Mouhoun (BF-01) 🏴󠁢󠁦󠀰󠀳󠁿 Flag for Centre (BF-03) 🏴󠁤󠁫󠀸󠀲󠁿 Flag for Central Denmark (DK-82) 🏴󠁢󠁦󠀰󠀷󠁿 Flag for Centre-Sud (BF-07) 👨🏽‍👩🏽‍👶🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁢󠁦󠀰󠀶󠁿 Flag for Centre-Ouest (BF-06) 🏴󠁢󠁦󠀰󠀵󠁿 Flag for Centre-Nord (BF-05) 🏴󠁢󠁢󠀰󠀸󠁿 Flag for Saint Michael (BB-08) 🏴󠁢󠁢󠀱󠀱󠁿 Flag for Saint Thomas (BB-11) 👨🏽‍❤️‍👩🏿 Couple With Heart - Man: Medium Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁦󠀰󠀸󠁿 Flag for Est (BF-08) 🏴󠁢󠁥󠁢󠁲󠁵󠁿 Flag for Brussels (BE-BRU) 🏴󠁢󠁤󠁧󠁿 Flag for Sylhet Division (BD-G) 🏴󠁢󠁦󠀱󠀱󠁿 Flag for Plateau-Central (BF-11) 🏴󠁢󠁤󠁢󠁿 Flag for Chittagong Division (BD-B) 🏴󠁢󠁦󠀱󠀳󠁿 Flag for Sud-Ouest (BF-13) 👨🏾‍👩🏾‍👶🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁢󠁧󠀰󠀵󠁿 Flag for Vidin (BG-05) 🏴󠁢󠁧󠀰󠀳󠁿 Flag for Varna (BG-03) 👨🏿‍❤️‍👩🏽 Couple With Heart - Man: Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁧󠀰󠀲󠁿 Flag for Burgas (BG-02) 🏴󠁢󠁦󠀱󠀰󠁿 Flag for Nord (BF-10) 🏴󠁢󠁧󠀰󠀴󠁿 Flag for Veliko Tarnovo (BG-04) 👨🏽‍👩🏽‍👧🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁧󠀰󠀷󠁿 Flag for Gabrovo (BG-07) 👨🏿‍👩🏿‍👶🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁧󠀰󠀸󠁿 Flag for Dobrich (BG-08) 🏴󠁢󠁦󠀱󠀲󠁿 Flag for Sahel (BF-12) 🏴󠁡󠁵󠁴󠁡󠁳󠁿 Flag for Tasmania (AU-TAS) 👨🏿‍❤️‍👩🏻 Couple With Heart - Man: Dark Skin Tone, Woman: Light Skin Tone 👩🏻‍👧🏻‍👦🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 👨🏻‍👩🏻‍👶🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👨🏼‍👩🏼‍👶🏼‍👧🏼 Family 
- Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏾‍❤️‍💋‍👩🏾 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁡󠁺󠁸󠁶󠁤󠁿 Flag for Khojavend (AZ-XVD) 🏴󠁢󠁧󠀱󠀱󠁿 Flag for Lovech (BG-11) 🏴󠁣󠁬󠁬󠁩󠁿 Flag for Libertador General Bernardo O’Higgins (CL-LI) 🏴󠁢󠁧󠀱󠀳󠁿 Flag for Pazardzhik (BG-13) 👨🏿‍❤️‍👩🏿 Couple With Heart - Man: Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁧󠀱󠀴󠁿 Flag for Pernik (BG-14) 🏴󠁢󠁧󠀱󠀰󠁿 Flag for Kyustendil (BG-10) 🏴󠁥󠁧󠁢󠁡󠁿 Flag for Red Sea (EG-BA) 🏴󠁴󠁺󠀱󠀱󠁿 Flag for Zanzibar Central/South (TZ-11) 👨🏿‍👩🏿‍👧🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁧󠀱󠀵󠁿 Flag for Pleven (BG-15) 👨🏿‍👨🏿‍👦🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 👨🏽‍👩🏽‍👶🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁧󠀲󠀱󠁿 Flag for Smolyan (BG-21) 🏴󠁢󠁧󠀰󠀱󠁿 Flag for Blagoevgrad (BG-01) 🏴󠁤󠁺󠀳󠀴󠁿 Flag for Bordj Bou Arréridj (DZ-34) 🏴󠁢󠁧󠀱󠀶󠁿 Flag for Plovdiv (BG-16) 🏴󠁣󠁩󠁶󠁢󠁿 Flag for Vallée du Bandama (CI-VB) 🏴󠁢󠁧󠀱󠀹󠁿 Flag for Silistra (BG-19) 👩‍❤️‍👨🏼 Couple With Heart - Woman, Man: Medium-Light Skin Tone 🏴󠁢󠁧󠀱󠀷󠁿 Flag for Razgrad (BG-17) 👨🏾‍❤️‍👨 Couple With Heart - Man: Medium-Dark Skin Tone, Man 🏴󠁡󠁯󠁣󠁮󠁮󠁿 Flag for Cunene (AO-CNN) 🏴󠁢󠁧󠀲󠀰󠁿 Flag for Sliven (BG-20) 🧕🏻‍♀️ Woman With Headscarf: Light Skin Tone 🏴󠁢󠁧󠀲󠀵󠁿 Flag for Targovishte (BG-25) 👩🏼‍👩🏼‍👶🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏾‍👩🏾‍👶🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁢󠁧󠀲󠀳󠁿 Flag for Sofia District (BG-23) 🏴󠁢󠁧󠀲󠀲󠁿 Flag for Sofia (BG-22) 👨🏿‍👩🏿‍👧🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👨🏻‍❤️‍💋‍👩🏾 Kiss - Man: Light Skin Tone, Woman: Medium-Dark Skin Tone 🧕🏽‍♀️ Woman With 
Headscarf: Medium Skin Tone 🏴󠁢󠁧󠀲󠀸󠁿 Flag for Yambol (BG-28) 🏴󠁢󠁨󠀱󠀳󠁿 Flag for Capital (BH-13) 🏴󠁢󠁧󠀲󠀶󠁿 Flag for Haskovo (BG-26) 🏴󠁬󠁩󠀰󠀷󠁿 Flag for Schaan (LI-07) 👨🏿‍👩🏿‍👶🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁨󠀱󠀵󠁿 Flag for Muharraq (BH-15) 🏴󠁢󠁨󠀱󠀴󠁿 Flag for Southern (BH-14) 🧕🏾‍♀️ Woman With Headscarf: Medium-Dark Skin Tone 🏴󠁲󠁯󠁳󠁢󠁿 Flag for Sibiu (RO-SB) 🧕🏼‍♀️ Woman With Headscarf: Medium-Light Skin Tone 👩🏻‍❤️‍👨🏿 Couple With Heart - Woman: Light Skin Tone, Man: Dark Skin Tone 🏴󠁢󠁨󠀱󠀷󠁿 Flag for Northern (BH-17) 🏴󠁢󠁩󠁢󠁢󠁿 Flag for Bubanza (BI-BB) 👩🏻‍❤️‍👩 Couple With Heart - Woman: Light Skin Tone, Woman 🏴󠁢󠁥󠁶󠁬󠁧󠁿 Flag for Flanders (BE-VLG) 👩🏽‍👧🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone 👨🏻‍👩🏻‍👶🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 🏴󠁢󠁩󠁢󠁭󠁿 Flag for Bujumbura (BI-BM) 🧕🏿‍♀️ Woman With Headscarf: Dark Skin Tone 🏴󠁢󠁩󠁢󠁬󠁿 Flag for Bujumbura Rural (BI-BL) 👨🏾‍❤️‍💋‍👩🏽 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium Skin Tone 👨🏼‍👩🏼‍👶🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏻‍👨🏻‍👦🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 🏴󠁢󠁩󠁣󠁡󠁿 Flag for Cankuzo (BI-CA) 🏴󠁢󠁧󠀱󠀲󠁿 Flag for Montana (BG-12) 🏴󠁬󠁶󠀰󠀸󠀵󠁿 Flag for Sala (LV-085) ⃣ Combining Enclosing Keycap 🏴󠁢󠁩󠁢󠁲󠁿 Flag for Bururi (BI-BR) 🏴󠁢󠁧󠀰󠀹󠁿 Flag for Kardzhali (BG-09) 🏴󠁢󠁩󠁲󠁭󠁿 Flag for Rumonge (BI-RM) 🏴󠁮󠁬󠁡󠁷󠁿 Flag for Aruba (NL-AW) 🏴󠁢󠁩󠁭󠁹󠁿 Flag for Muyinga (BI-MY) 🏴󠁢󠁩󠁲󠁴󠁿 Flag for Rutana (BI-RT) 🏴󠁢󠁩󠁲󠁹󠁿 Flag for Ruyigi (BI-RY) 🏴󠁢󠁩󠁫󠁩󠁿 Flag for Kirundo (BI-KI) 🏴󠁢󠁩󠁫󠁹󠁿 Flag for Kayanza (BI-KY) 🏴󠁢󠁩󠁭󠁷󠁿 Flag for Mwaro (BI-MW) 🏴󠁢󠁧󠀲󠀷󠁿 Flag for Shumen (BG-27) 🏴󠁢󠁩󠁮󠁧󠁿 Flag for Ngozi (BI-NG) 🏴󠁢󠁩󠁫󠁲󠁿 Flag for Karuzi (BI-KR) 🏴󠁢󠁩󠁭󠁵󠁿 Flag for Muramvya (BI-MU) 🏴󠁭󠁡󠀱󠀵󠁿 Flag for Laâyoune-Boujdour-Sakia El Hamra (MA-15) 👨🏽‍👩🏽‍👶🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: 
Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏾‍👩🏾‍👶🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁢󠁪󠁤󠁯󠁿 Flag for Donga (BJ-DO) 👩🏽‍👨🏽‍👶🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 👨🏽‍❤️‍💋‍👩🏼 Kiss - Man: Medium Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁦󠁲󠁨󠁤󠁦󠁿 Flag for Hauts-de-France (FR-HDF) 🏴󠁢󠁪󠁡󠁬󠁿 Flag for Alibori (BJ-AL) 🏴󠁢󠁪󠁡󠁫󠁿 Flag for Atakora (BJ-AK) 👨🏿‍👩🏿‍👶🏿‍👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁢󠁪󠁬󠁩󠁿 Flag for Littoral (BJ-LI) 🏴󠁢󠁪󠁢󠁯󠁿 Flag for Borgou (BJ-BO) 👩‍👩‍👧‍👶 Family: Woman, Woman, Girl, Baby 🏴󠁵󠁳󠁮󠁤󠁿 Flag for North Dakota (US-ND) 👨🏼‍❤️‍💋‍👨🏾 Kiss - Man: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁢󠁪󠁫󠁯󠁿 Flag for Kouffo (BJ-KO) 🏴󠁢󠁪󠁰󠁬󠁿 Flag for Plateau (BJ-PL) 🏴󠁧󠁤󠀱󠀰󠁿 Flag for Carriacou and Petite Martinique (GD-10) 🏴󠁢󠁪󠁺󠁯󠁿 Flag for Zou (BJ-ZO) 👩🏼‍❤️‍👨🏻 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Light Skin Tone 👩🏽‍❤️‍👨🏽 Couple With Heart - Woman: Medium Skin Tone, Man: Medium Skin Tone 👨🏽‍❤️‍👩🏼 Couple With Heart - Man: Medium Skin Tone, Woman: Medium-Light Skin Tone 👩🏽‍❤️‍👨🏻 Couple With Heart - Woman: Medium Skin Tone, Man: Light Skin Tone 🏴󠁬󠁢󠁢󠁩󠁿 Flag for Beqaa (LB-BI) 🏴󠁢󠁮󠁴󠁥󠁿 Flag for Temburong (BN-TE) 👩🏻‍👦🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁮󠁴󠁵󠁿 Flag for Tutong (BN-TU) 🏴󠁢󠁮󠁢󠁭󠁿 Flag for Brunei-Muara (BN-BM) 👨🏻‍👩🏻‍👦🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁧󠀰󠀶󠁿 Flag for Vratsa (BG-06) 👩🏽‍❤️‍👨🏼 Couple With Heart - Woman: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁯󠁢󠁿 Flag for Beni (BO-B) 🏴󠁢󠁮󠁢󠁥󠁿 Flag for Belait (BN-BE) 👩🏼‍❤️‍👨 Couple With Heart - Woman: Medium-Light Skin Tone, Man 🏴󠁢󠁪󠁯󠁵󠁿 Flag for Ouémé (BJ-OU) 👩🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, 
Boy: Medium-Light Skin Tone 🏴󠁳󠁣󠀲󠀵󠁿 Flag for Roche Caiman (SC-25) 👩🏻‍❤️‍👨🏾 Couple With Heart - Woman: Light Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁢󠁯󠁣󠁿 Flag for Cochabamba (BO-C) 👨🏾‍👩🏾‍👧🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁢󠁯󠁮󠁿 Flag for Pando (BO-N) 👩🏽‍❤️‍👩🏻 Couple With Heart - Woman: Medium Skin Tone, Woman: Light Skin Tone 👩🏾‍❤️‍👨🏽 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁯󠁨󠁿 Flag for Chuquisaca (BO-H) 🏴󠁢󠁯󠁬󠁿 Flag for La Paz (BO-L) 🏴󠁭󠁮󠀰󠀳󠀹󠁿 Flag for Khentii (MN-039) 🕴🏽‍♀️ Woman in Business Suit Levitating: Medium Skin Tone 🏴󠁭󠁫󠀲󠀷󠁿 Flag for Dolneni (MK-27) 🏴󠁢󠁧󠀲󠀴󠁿 Flag for Stara Zagora (BG-24) 👩🏽‍👦🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁩󠁲󠀱󠀳󠁿 Flag for Sistan and Baluchestan (IR-13) 👩🏾‍❤️‍👨🏼 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁯󠁰󠁿 Flag for Potosí (BO-P) 🏴󠁢󠁱󠁢󠁯󠁿 Flag for Bonaire (BQ-BO) 👩‍❤️‍💋‍👨🏻 Kiss - Woman, Man: Light Skin Tone 👩🏾‍❤️‍👨 Couple With Heart - Woman: Medium-Dark Skin Tone, Man 👩🏼‍👦🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁢󠁡󠁢󠁲󠁣󠁿 Flag for Brčko District (BA-BRC) 🏴󠁢󠁱󠁳󠁡󠁿 Flag for Saba (BQ-SA) 👩🏽‍❤️‍👨🏾 Couple With Heart - Woman: Medium Skin Tone, Man: Medium-Dark Skin Tone 👩🏾‍❤️‍👨🏿 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Dark Skin Tone 🏴󠁢󠁲󠁡󠁣󠁿 Flag for Acre (BR-AC) 🏴󠁢󠁩󠁧󠁩󠁿 Flag for Gitega (BI-GI) 👩🏿‍👦🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone 👩🏿‍❤️‍👨🏻 Couple With Heart - Woman: Dark Skin Tone, Man: Light Skin Tone 🏴󠁢󠁲󠁡󠁭󠁿 Flag for Amazonas (BR-AM) 🏴󠁡󠁲󠁣󠁿 Flag for Buenos Aires (AR-C) 👨🏼‍👩🏼‍👧🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏼‍❤️‍💋‍👨🏼 Kiss - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁲󠁥󠁳󠁿 Flag for Espírito Santo (BR-ES) 👨🏿‍❤️‍💋‍👨🏾 Kiss - Man: Dark Skin Tone, Man: Medium-Dark Skin 
Tone 👨🏼‍❤️‍💋‍👨🏽 Kiss - Man: Medium-Light Skin Tone, Man: Medium Skin Tone 👩🏾‍👦🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨🏻‍❤️‍👩 Couple With Heart - Man: Light Skin Tone, Woman 👨🏿‍❤️‍💋‍👩🏾 Kiss - Man: Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩🏻‍❤️‍💋‍👩🏽 Kiss - Woman: Light Skin Tone, Woman: Medium Skin Tone 👨🏼‍❤️‍💋‍👨🏿 Kiss - Man: Medium-Light Skin Tone, Man: Dark Skin Tone 👩🏽‍👦🏽‍👦🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏿‍❤️‍👩🏼 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁢󠁲󠁭󠁡󠁿 Flag for Maranhão (BR-MA) 👩🏿‍❤️‍👩🏽 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium Skin Tone 👩🏿‍❤️‍👩 Couple With Heart - Woman: Dark Skin Tone, Woman 🏴󠁢󠁲󠁡󠁰󠁿 Flag for Amapá (BR-AP) 👨🏽‍❤️‍👨🏻 Couple With Heart - Man: Medium Skin Tone, Man: Light Skin Tone 👩🏻‍❤️‍💋‍👨🏻 Kiss - Woman: Light Skin Tone, Man: Light Skin Tone 👨🏽‍❤️‍💋‍👨🏽 Kiss - Man: Medium Skin Tone, Man: Medium Skin Tone 👩🏿‍❤️‍💋‍👩🏻 Kiss - Woman: Dark Skin Tone, Woman: Light Skin Tone 👨🏽‍❤️‍💋‍👩🏿 Kiss - Man: Medium Skin Tone, Woman: Dark Skin Tone 👩🏼‍❤️‍💋‍👨🏾 Kiss - Woman: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏿‍❤️‍💋‍👨🏼 Kiss - Man: Dark Skin Tone, Man: Medium-Light Skin Tone 👨🏾‍❤️‍💋‍👨🏿 Kiss - Man: Medium-Dark Skin Tone, Man: Dark Skin Tone 👩🏽‍❤️‍💋‍👩🏿 Kiss - Woman: Medium Skin Tone, Woman: Dark Skin Tone 👩🏼‍❤️‍💋‍👨🏿 Kiss - Woman: Medium-Light Skin Tone, Man: Dark Skin Tone 👨🏽‍❤️‍💋‍👩🏽 Kiss - Man: Medium Skin Tone, Woman: Medium Skin Tone 👨🏾‍❤️‍💋‍👨🏼 Kiss - Man: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 👨🏽‍❤️‍💋‍👩🏻 Kiss - Man: Medium Skin Tone, Woman: Light Skin Tone 👨🏾‍❤️‍💋‍👨 Kiss - Man: Medium-Dark Skin Tone, Man 👨🏾‍❤️‍💋‍👨🏾 Kiss - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👩‍❤️‍💋‍👨🏾 Kiss - Woman, Man: Medium-Dark Skin Tone 👩‍❤️‍💋‍👩🏻 Kiss - Woman, Woman: Light Skin Tone 👩🏽‍❤️‍💋‍👨🏻 Kiss - Woman: Medium Skin Tone, Man: Light Skin Tone 👩🏿‍❤️‍💋‍👨🏿 Kiss - Woman: Dark Skin Tone, 
Man: Dark Skin Tone 👩🏻‍❤️‍💋‍👩🏿 Kiss - Woman: Light Skin Tone, Woman: Dark Skin Tone 👩🏻‍❤️‍💋‍👩🏼 Kiss - Woman: Light Skin Tone, Woman: Medium-Light Skin Tone 👩🏾‍❤️‍💋‍👩🏿 Kiss - Woman: Medium-Dark Skin Tone, Woman: Dark Skin Tone 👩🏾‍❤️‍💋‍👩 Kiss - Woman: Medium-Dark Skin Tone, Woman 👩🏾‍❤️‍💋‍👩🏻 Kiss - Woman: Medium-Dark Skin Tone, Woman: Light Skin Tone 👩🏻‍❤️‍👨 Couple With Heart - Woman: Light Skin Tone, Man 👩🏻‍👩🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone 👩🏾‍❤️‍💋‍👨🏾 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👨🏻‍❤️‍👨🏽 Couple With Heart - Man: Light Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁲󠁭󠁴󠁿 Flag for Mato Grosso (BR-MT) 👨🏽‍❤️‍👩🏻 Couple With Heart - Man: Medium Skin Tone, Woman: Light Skin Tone 👨‍❤️‍👨🏿 Couple With Heart - Man, Man: Dark Skin Tone 👩🏿‍❤️‍💋‍👨🏼 Kiss - Woman: Dark Skin Tone, Man: Medium-Light Skin Tone 👩🏿‍❤️‍💋‍👩🏾 Kiss - Woman: Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩🏻‍👦🏻‍👧🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁯󠁳󠁿 Flag for Santa Cruz (BO-S) 👨🏻‍❤️‍👩🏽 Couple With Heart - Man: Light Skin Tone, Woman: Medium Skin Tone 👨🏽‍❤️‍👩🏽 Couple With Heart - Man: Medium Skin Tone, Woman: Medium Skin Tone 👩🏾‍❤️‍💋‍👩🏽 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁪󠁣󠁯󠁿 Flag for Collines (BJ-CO) 👨🏻‍👩🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone 👨‍❤️‍👨🏽 Couple With Heart - Man, Man: Medium Skin Tone 👨🏾‍👩🏾‍👦🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏼‍❤️‍👨 Couple With Heart - Man: Medium-Light Skin Tone, Man 👨🏾‍❤️‍👩🏽 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁲󠁰󠁡󠁿 Flag for Pará (BR-PA) 👩🏽‍👦🏽‍👧🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👨🏼‍❤️‍👨🏼 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 👨🏿‍❤️‍👨🏻 Couple With Heart - Man: Dark Skin Tone, Man: 
Light Skin Tone 👩🏽‍❤️‍👩🏽 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium Skin Tone 👨🏾‍❤️‍👨🏽 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium Skin Tone 👨🏽‍❤️‍👨🏽 Couple With Heart - Man: Medium Skin Tone, Man: Medium Skin Tone 👨🏻‍❤️‍👩🏼 Couple With Heart - Man: Light Skin Tone, Woman: Medium-Light Skin Tone 👨🏾‍❤️‍👩🏿 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Dark Skin Tone 👨🏾‍❤️‍👨🏼 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 👩🏾‍❤️‍💋‍👩🏾 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩🏼‍❤️‍👩🏻 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Light Skin Tone 👨🏿‍❤️‍👩🏼 Couple With Heart - Man: Dark Skin Tone, Woman: Medium-Light Skin Tone 👨🏼‍❤️‍👨🏾 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏽‍❤️‍👨🏾 Couple With Heart - Man: Medium Skin Tone, Man: Medium-Dark Skin Tone 👩‍❤️‍👨🏾 Couple With Heart - Woman, Man: Medium-Dark Skin Tone 🏴󠁢󠁲󠁡󠁬󠁿 Flag for Alagoas (BR-AL) 👩‍❤️‍👨🏻 Couple With Heart - Woman, Man: Light Skin Tone 🏴󠁢󠁦󠀰󠀹󠁿 Flag for Hauts-Bassins (BF-09) 👨🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏾‍❤️‍👩🏾 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁢󠁲󠁲󠁪󠁿 Flag for Rio de Janeiro (BR-RJ) 👨🏾‍❤️‍💋‍👩🏻 Kiss - Man: Medium-Dark Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁲󠁲󠁯󠁿 Flag for Rondônia (BR-RO) 👨🏾‍❤️‍👨🏿 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Dark Skin Tone 👨🏽‍👦🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone 👨🏼‍❤️‍👨🏽 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁲󠁰󠁩󠁿 Flag for Piauí (BR-PI) 👨🏽‍👩🏽‍👦🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁢󠁲󠁲󠁮󠁿 Flag for Rio Grande do Norte (BR-RN) 👩🏻‍❤️‍👨🏻 Couple With Heart - Woman: Light Skin Tone, Man: Light Skin Tone 👨🏻‍👦🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍❤️‍👩🏾 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: 
Medium-Dark Skin Tone 👨🏿‍❤️‍👩🏾 Couple With Heart - Man: Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁢󠁲󠁳󠁥󠁿 Flag for Sergipe (BR-SE) 🏴󠁢󠁲󠁰󠁲󠁿 Flag for Paraná (BR-PR) 👨🏿‍👦🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone 👩🏼‍❤️‍👩🏽 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium Skin Tone 👩🏾‍❤️‍👩🏼 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁲󠁵󠁭󠁯󠁳󠁿 Flag for Moscow Province (RU-MOS) 👩🏽‍❤️‍💋‍👩🏽 Kiss - Woman: Medium Skin Tone, Woman: Medium Skin Tone 👩🏿‍👦🏿‍👦🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁲󠁳󠁰󠁿 Flag for São Paulo (BR-SP) 🏴󠁩󠁲󠀰󠀱󠁿 Flag for East Azerbaijan (IR-01) 🏴󠁢󠁲󠁲󠁳󠁿 Flag for Rio Grande do Sul (BR-RS) 👩🏼‍❤️‍👨🏿 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Dark Skin Tone 🏴󠁮󠁯󠀱󠀴󠁿 Flag for Sogn og Fjordane (NO-14) 🏴󠁢󠁲󠁴󠁯󠁿 Flag for Tocantins (BR-TO) 🏴󠁳󠁩󠀱󠀸󠀲󠁿 Flag for Sveti Andraž v Slovenskih Goricah (SI-182) 👨🏼‍❤️‍👩🏻 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Light Skin Tone 👨🏿‍❤️‍👨🏽 Couple With Heart - Man: Dark Skin Tone, Man: Medium Skin Tone 👨🏽‍👦🏽‍👦🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👨🏿‍👦🏿‍👦🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁳󠁢󠁩󠁿 Flag for Bimini (BS-BI) 👨🏿‍❤️‍👩 Couple With Heart - Man: Dark Skin Tone, Woman 👩🏻‍👦🏻‍👦🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁲󠁲󠁲󠁿 Flag for Roraima (BR-RR) 🏴󠁢󠁯󠁯󠁿 Flag for Oruro (BO-O) 🏴󠁢󠁳󠁥󠁸󠁿 Flag for Exuma (BS-EX) 👨🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏽‍❤️‍👨 Couple With Heart - Woman: Medium Skin Tone, Man 🏴󠁢󠁳󠁣󠁥󠁿 Flag for Central Eleuthera (BS-CE) 🏴󠁢󠁳󠁢󠁹󠁿 Flag for Berry Islands (BS-BY) 🏴󠁢󠁩󠁭󠁡󠁿 Flag for Makamba (BI-MA) 🏴󠁢󠁲󠁤󠁦󠁿 Flag for Federal District (BR-DF) 👩🏻‍❤️‍👩🏾 Couple With Heart - Woman: Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏼‍❤️‍💋‍👩🏼 Kiss - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁢󠁳󠁣󠁯󠁿 Flag for Central Abaco (BS-CO) 🏴󠁢󠁳󠁥󠁧󠁿 Flag for East Grand 
Bahama (BS-EG) 🏴󠁢󠁳󠁣󠁳󠁿 Flag for Central Andros (BS-CS) 👨🏻‍👦🏻‍👧🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁳󠁣󠁫󠁿 Flag for Crooked Island (BS-CK) 🏴󠁢󠁳󠁢󠁰󠁿 Flag for Black Point (BS-BP) 👨🏼‍👦🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👦🏽‍👧🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👩🏿‍❤️‍👨🏾 Couple With Heart - Woman: Dark Skin Tone, Man: Medium-Dark Skin Tone 👩🏾‍❤️‍💋‍👨🏼 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁳󠁮󠁥󠁿 Flag for North Eleuthera (BS-NE) 🏴󠁢󠁳󠁮󠁯󠁿 Flag for North Abaco (BS-NO) 🏴󠁢󠁳󠁭󠁧󠁿 Flag for Mayaguana (BS-MG) 👨🏾‍👦🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏼‍❤️‍💋‍👩🏻 Kiss - Man: Medium-Light Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁳󠁧󠁣󠁿 Flag for Grand Cay (BS-GC) 🏴󠁢󠁳󠁦󠁰󠁿 Flag for Freeport (BS-FP) 🏴󠁢󠁳󠁩󠁮󠁿 Flag for Inagua (BS-IN) 🏴󠁢󠁳󠁨󠁴󠁿 Flag for Hope Town (BS-HT) 👩🏾‍❤️‍👩🏿 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁳󠁬󠁩󠁿 Flag for Long Island (BS-LI) 👨🏿‍👦🏿‍👧🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 👨🏾‍❤️‍👩 Couple With Heart - Man: Medium-Dark Skin Tone, Woman 👩🏿‍❤️‍👨🏿 Couple With Heart - Woman: Dark Skin Tone, Man: Dark Skin Tone 👨🏻‍👦🏻‍👶🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👨‍👨‍👶 Family: Man, Man, Baby 👩‍👧‍👶 Family: Woman, Girl, Baby 👨‍👦‍👶 Family: Man, Boy, Baby 👨‍👨‍👶‍👦 Family: Man, Man, Baby, Boy 👨‍👦‍👧 Family: Man, Boy, Girl 👨‍👶‍👶 Family: Man, Baby, Baby 🏴󠁢󠁳󠁲󠁩󠁿 Flag for Ragged Island (BS-RI) 👩🏿‍❤️‍👩🏿 Couple With Heart - Woman: Dark Skin Tone, Woman: Dark Skin Tone 👩🏿‍❤️‍👨🏽 Couple With Heart - Woman: Dark Skin Tone, Man: Medium Skin Tone 👩🏼‍❤️‍👨🏼 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁳󠁮󠁳󠁿 Flag for North Andros (BS-NS) 👩🏿‍❤️‍👩🏻 Couple With Heart - Woman: Dark Skin Tone, Woman: Light Skin Tone 👨🏻‍❤️‍💋‍👨 Kiss - Man: Light Skin Tone, 
Man 🏴󠁢󠁳󠁳󠁡󠁿 Flag for South Andros (BS-SA) 👨🏻‍❤️‍💋‍👨🏼 Kiss - Man: Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁳󠁳󠁥󠁿 Flag for South Eleuthera (BS-SE) 👨🏼‍👦🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏻‍❤️‍💋‍👩🏻 Kiss - Man: Light Skin Tone, Woman: Light Skin Tone 👨🏼‍❤️‍💋‍👩🏾 Kiss - Man: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏾‍❤️‍💋‍👩🏼 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 👨🏾‍❤️‍💋‍👨🏻 Kiss - Man: Medium-Dark Skin Tone, Man: Light Skin Tone 🏴󠁢󠁲󠁳󠁣󠁿 Flag for Santa Catarina (BR-SC) 👩‍👩‍👦‍👧 Family: Woman, Woman, Boy, Girl 👨‍❤️‍💋‍👩🏾 Kiss - Man, Woman: Medium-Dark Skin Tone 🏴󠁢󠁳󠁲󠁣󠁿 Flag for Rum Cay (BS-RC) 👩‍👩‍👶‍👦 Family: Woman, Woman, Baby, Boy 👨🏻‍❤️‍💋‍👩🏽 Kiss - Man: Light Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁳󠁣󠁩󠁿 Flag for Cat Island (BS-CI) 👩🏽‍❤️‍👩 Couple With Heart - Woman: Medium Skin Tone, Woman 👨🏽‍👦🏽‍👶🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👩‍👨‍👦‍👶 Family: Woman, Man, Boy, Baby 👨🏾‍❤️‍💋‍👩 Kiss - Man: Medium-Dark Skin Tone, Woman 👨‍❤️‍💋‍👨🏻 Kiss - Man, Man: Light Skin Tone 👨🏻‍❤️‍💋‍👨🏿 Kiss - Man: Light Skin Tone, Man: Dark Skin Tone 👨🏼‍❤️‍💋‍👩🏽 Kiss - Man: Medium-Light Skin Tone, Woman: Medium Skin Tone 👨🏾‍👦🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁢󠁳󠁳󠁯󠁿 Flag for South Abaco (BS-SO) 👩🏾‍❤️‍💋‍👩🏼 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 👨🏻‍❤️‍👨🏿 Couple With Heart - Man: Light Skin Tone, Man: Dark Skin Tone 👨🏿‍❤️‍💋‍👨🏿 Kiss - Man: Dark Skin Tone, Man: Dark Skin Tone 👩🏾‍❤️‍💋‍👨🏿 Kiss - Woman: Medium-Dark Skin Tone, Man: Dark Skin Tone 👩🏼‍❤️‍💋‍👨🏽 Kiss - Woman: Medium-Light Skin Tone, Man: Medium Skin Tone 👩🏾‍❤️‍💋‍👨🏻 Kiss - Woman: Medium-Dark Skin Tone, Man: Light Skin Tone 👩🏽‍❤️‍💋‍👨 Kiss - Woman: Medium Skin Tone, Man 👨‍👧‍👶 Family: Man, Girl, Baby 👩🏻‍❤️‍💋‍👨🏾 Kiss - Woman: Light Skin Tone, Man: Medium-Dark Skin Tone 👨‍❤️‍👨🏼 Couple With Heart - Man, Man: Medium-Light 
Skin Tone 👩🏼‍❤️‍💋‍👩🏼 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 👨🏿‍❤️‍💋‍👩🏿 Kiss - Man: Dark Skin Tone, Woman: Dark Skin Tone 👨‍❤️‍💋‍👩🏼 Kiss - Man, Woman: Medium-Light Skin Tone 🏴󠁣󠁩󠁡󠁢󠁿 Flag for Abidjan (CI-AB) 👩🏻‍❤️‍💋‍👨 Kiss - Woman: Light Skin Tone, Man 👩🏼‍❤️‍💋‍👩🏾 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏻‍❤️‍💋‍👩🏼 Kiss - Man: Light Skin Tone, Woman: Medium-Light Skin Tone 👩🏽‍❤️‍💋‍👨🏿 Kiss - Woman: Medium Skin Tone, Man: Dark Skin Tone 👩🏿‍❤️‍💋‍👩🏼 Kiss - Woman: Dark Skin Tone, Woman: Medium-Light Skin Tone 👩🏿‍❤️‍💋‍👨🏾 Kiss - Woman: Dark Skin Tone, Man: Medium-Dark Skin Tone 👩🏼‍❤️‍💋‍👨 Kiss - Woman: Medium-Light Skin Tone, Man 👩‍❤️‍👩🏾 Couple With Heart - Woman, Woman: Medium-Dark Skin Tone 👨🏿‍❤️‍👨🏼 Couple With Heart - Man: Dark Skin Tone, Man: Medium-Light Skin Tone 👨🏿‍👦🏿‍👶🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 👨🏼‍❤️‍👩🏼 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 👩🏼‍❤️‍👨🏽 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁳󠁳󠁷󠁿 Flag for Spanish Wells (BS-SW) 👨🏿‍❤️‍👨🏿 Couple With Heart - Man: Dark Skin Tone, Man: Dark Skin Tone 👨🏼‍❤️‍👨🏿 Couple With Heart - Man: Medium-Light Skin Tone, Man: Dark Skin Tone 👨🏼‍❤️‍👩 Couple With Heart - Man: Medium-Light Skin Tone, Woman 👩🏼‍❤️‍👩🏼 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 👨🏼‍❤️‍👨🏻 Couple With Heart - Man: Medium-Light Skin Tone, Man: Light Skin Tone 👨🏾‍❤️‍👨🏾 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👩‍❤️‍👩🏼 Couple With Heart - Woman, Woman: Medium-Light Skin Tone 👨🏼‍❤️‍👩🏿 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Dark Skin Tone 👨🏻‍❤️‍👨🏾 Couple With Heart - Man: Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏽‍❤️‍👩🏾 Couple With Heart - Man: Medium Skin Tone, Woman: Medium-Dark Skin Tone 👩‍❤️‍👩🏿 Couple With Heart - Woman, Woman: Dark Skin Tone 👨🏽‍❤️‍👨🏿 Couple With Heart - Man: Medium Skin Tone, Man: Dark 
Skin Tone 👨‍👨‍👦‍👶 Family: Man, Man, Boy, Baby 👨🏿‍❤️‍👨 Couple With Heart - Man: Dark Skin Tone, Man 👩🏻‍❤️‍👩🏿 Couple With Heart - Woman: Light Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁳󠁳󠁳󠁿 Flag for San Salvador (BS-SS) 🏴󠁢󠁴󠀱󠀴󠁿 Flag for Samtse (BT-14) 👩🏻‍❤️‍👨🏽 Couple With Heart - Woman: Light Skin Tone, Man: Medium Skin Tone 👩🏼‍❤️‍👩🏿 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Dark Skin Tone 👨‍❤️‍👩🏿 Couple With Heart - Man, Woman: Dark Skin Tone 🏴󠁢󠁴󠀱󠀱󠁿 Flag for Paro (BT-11) 👨🏻‍❤️‍👩🏾 Couple With Heart - Man: Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁢󠁴󠀱󠀵󠁿 Flag for Thimphu (BT-15) 👩🏾‍❤️‍👩🏽 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁳󠁷󠁧󠁿 Flag for West Grand Bahama (BS-WG) 🏴󠁢󠁴󠀱󠀳󠁿 Flag for Haa (BT-13) 🏴󠁢󠁴󠀱󠀲󠁿 Flag for Chukha (BT-12) 👨🏻‍❤️‍💋‍👨🏽 Kiss - Man: Light Skin Tone, Man: Medium Skin Tone 👨🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏽‍👧🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁳󠁡󠁫󠁿 Flag for Acklins (BS-AK) 🏴󠁢󠁴󠀳󠀲󠁿 Flag for Trongsa (BT-32) 🏴󠁢󠁴󠀴󠀱󠁿 Flag for Trashigang (BT-41) 🏴󠁢󠁴󠀲󠀳󠁿 Flag for Punakha (BT-23) 🏴󠁢󠁴󠀲󠀴󠁿 Flag for Wangdue Phodrang (BT-24) 🏴󠁢󠁴󠀳󠀳󠁿 Flag for Bumthang (BT-33) 🏴󠁢󠁴󠀳󠀴󠁿 Flag for Zhemgang (BT-34) 👩🏼‍❤️‍💋‍👨🏼 Kiss - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁴󠀴󠀲󠁿 Flag for Mongar (BT-42) 🏴󠁢󠁲󠁰󠁢󠁿 Flag for Paraíba (BR-PB) 👩🏿‍❤️‍👨🏼 Couple With Heart - Woman: Dark Skin Tone, Man: Medium-Light Skin Tone 🏴󠁣󠁨󠁺󠁨󠁿 Flag for Zürich (CH-ZH) 🏴󠁢󠁴󠀳󠀱󠁿 Flag for Sarpang (BT-31) 🏴󠁢󠁴󠀲󠀲󠁿 Flag for Dagana (BT-22) 👩🏻‍❤️‍💋‍👨🏽 Kiss - Woman: Light Skin Tone, Man: Medium Skin Tone 👨🏿‍👨🏿‍👧🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁷󠁣󠁥󠁿 Flag for Central (BW-CE) 🏴󠁢󠁴󠁧󠁡󠁿 Flag for Gasa (BT-GA) 🏴󠁢󠁷󠁣󠁨󠁿 Flag for Chobe (BW-CH) 🏴󠁢󠁴󠀴󠀵󠁿 Flag for Samdrup Jongkhar (BT-45) 🏴󠁢󠁷󠁦󠁲󠁿 Flag for Francistown (BW-FR) 🏴󠁢󠁴󠀴󠀴󠁿 Flag for Lhuntse (BT-44) 🏴󠁢󠁴󠁴󠁹󠁿 Flag for Trashiyangtse 
(BT-TY) 🏴󠁢󠁴󠀲󠀱󠁿 Flag for Tsirang (BT-21) 🏴󠁢󠁴󠀴󠀳󠁿 Flag for Pemagatshel (BT-43) 👨🏿‍👧🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁷󠁮󠁥󠁿 Flag for North East (BW-NE) 🏴󠁢󠁷󠁫󠁬󠁿 Flag for Kgatleng (BW-KL) 🏴󠁢󠁷󠁫󠁧󠁿 Flag for Kgalagadi (BW-KG) 🏴󠁢󠁷󠁳󠁥󠁿 Flag for South East (BW-SE) 🏴󠁢󠁷󠁫󠁷󠁿 Flag for Kweneng (BW-KW) 👨🏻‍👧🏻‍👦🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁷󠁮󠁷󠁿 Flag for North West (BW-NW) 🏴󠁢󠁷󠁪󠁷󠁿 Flag for Jwaneng (BW-JW) 🏴󠁢󠁳󠁭󠁣󠁿 Flag for Mangrove Cay (BS-MC) 👩🏼‍❤️‍💋‍👩🏿 Kiss - Woman: Medium-Light Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁷󠁧󠁨󠁿 Flag for Ghanzi (BW-GH) 👨🏻‍❤️‍👩🏻 Couple With Heart - Man: Light Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁪󠁡󠁱󠁿 Flag for Atlantique (BJ-AQ) 👨🏼‍👧🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👨🏾‍👧🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨🏿‍👧🏿‍👦🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁷󠁳󠁯󠁿 Flag for Southern (BW-SO) 👨🏽‍👧🏽‍👦🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍❤️‍👩 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman 👨‍👩‍👶‍👧 Family: Man, Woman, Baby, Girl 👨🏽‍❤️‍💋‍👨🏾 Kiss - Man: Medium Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁢󠁷󠁳󠁴󠁿 Flag for Sowa Town (BW-ST) 🏴󠁢󠁷󠁳󠁰󠁿 Flag for Selibe Phikwe (BW-SP) 👩🏿‍❤️‍👩🏾 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩‍👨‍👦‍👦 Family: Woman, Man, Boy, Boy 👩🏿‍👨🏿‍👦🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁢󠁹󠁨󠁭󠁿 Flag for Minsk (BY-HM) 🏴󠁢󠁹󠁨󠁯󠁿 Flag for Homel (BY-HO) 👨🏻‍👦🏻‍👦🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 👨🏻‍👩🏻‍👧🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁴󠁲󠀳󠀵󠁿 Flag for Izmir (TR-35) 🏴󠁢󠁹󠁨󠁲󠁿 Flag for Hrodna (BY-HR) 🏴󠁢󠁹󠁭󠁡󠁿 Flag for Magileu (BY-MA) 🏴󠁢󠁹󠁭󠁩󠁿 Flag for Minsk Region (BY-MI) 👨🏼‍❤️‍💋‍👩🏿 Kiss - Man: Medium-Light 
Skin Tone, Woman: Dark Skin Tone 👨🏾‍❤️‍👩🏻 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁺󠁢󠁺󠁿 Flag for Belize (BZ-BZ) 🏴󠁢󠁷󠁬󠁯󠁿 Flag for Lobatse (BW-LO) 👩‍👦‍👧 Family: Woman, Boy, Girl 👨🏼‍👧🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁢󠁳󠁭󠁩󠁿 Flag for Moore’s Island (BS-MI) 🏴󠁢󠁪󠁭󠁯󠁿 Flag for Mono (BJ-MO) 👨🏽‍👧🏽‍👧🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁹󠁶󠁩󠁿 Flag for Vitebsk (BY-VI) 🏴󠁢󠁺󠁳󠁣󠁿 Flag for Stann Creek (BZ-SC) 👨🏾‍👧🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁢󠁺󠁣󠁺󠁬󠁿 Flag for Corozal (BZ-CZL) 👨🏻‍👧🏻‍👶🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👨🏿‍👧🏿‍👧🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁺󠁴󠁯󠁬󠁿 Flag for Toledo (BZ-TOL) 🏴󠁮󠁰󠀵󠁿 Flag for Sudur Pashchimanchal (NP-5) 🏴󠁢󠁳󠁨󠁩󠁿 Flag for Harbour Island (BS-HI) 🏴󠁣󠁡󠁡󠁢󠁿 Flag for Alberta (CA-AB) 👩🏾‍❤️‍👨🏾 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👨🏽‍❤️‍💋‍👨🏼 Kiss - Man: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁬󠁡󠁶󠁩󠁿 Flag for Vientiane Province (LA-VI) 👨‍👩‍👦‍👧 Family: Man, Woman, Boy, Girl 👨🏻‍👧🏻‍👧🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 👨🏼‍👧🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏽‍👧🏽‍👶🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁣󠁡󠁰󠁥󠁿 Flag for Prince Edward Island (CA-PE) 🏴󠁣󠁤󠁫󠁧󠁿 Flag for Kwango (CD-KG) 🏴󠁣󠁡󠁮󠁳󠁿 Flag for Nova Scotia (CA-NS) 👨🏾‍👧🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁣󠁤󠁨󠁵󠁿 Flag for Haut-Uélé (CD-HU) 🏴󠁣󠁤󠁢󠁣󠁿 Flag for Bas-Congo (CD-BC) 🏴󠁣󠁤󠁳󠁵󠁿 Flag for Sud-Ubangi (CD-SU) 🏴󠁣󠁤󠁭󠁡󠁿 Flag for Maniema (CD-MA) 🏴󠁣󠁤󠁳󠁡󠁿 Flag for Sankuru (CD-SA) 🏴󠁣󠁤󠁴󠁵󠁿 Flag for Tshuapa (CD-TU) 🏴󠁣󠁡󠁹󠁴󠁿 Flag for Yukon (CA-YT) 🏴󠁣󠁤󠁭󠁯󠁿 Flag for Mongala (CD-MO) 🏴󠁣󠁦󠁢󠁢󠁿 Flag for 
Bamingui-Bangoran (CF-BB) 🏴󠁣󠁤󠁭󠁮󠁿 Flag for Mai-Ndombe (CD-MN) 🏴󠁣󠁡󠁮󠁵󠁿 Flag for Nunavut (CA-NU) 🏴󠁣󠁤󠁫󠁬󠁿 Flag for Kwilu (CD-KL) 🏴󠁣󠁡󠁮󠁢󠁿 Flag for New Brunswick (CA-NB) 🏴󠁣󠁦󠁢󠁧󠁦󠁿 Flag for Bangui (CF-BGF) 🏴󠁣󠁤󠁫󠁮󠁿 Flag for Kinshasa (CD-KN) 🏴󠁣󠁤󠁮󠁫󠁿 Flag for North Kivu (CD-NK) 🏴󠁣󠁡󠁮󠁴󠁿 Flag for Northwest Territories (CA-NT) 🏴󠁣󠁤󠁴󠁯󠁿 Flag for Tshopo (CD-TO) 🏴󠁣󠁤󠁢󠁵󠁿 Flag for Bas-Uélé (CD-BU) 🏴󠁣󠁤󠁨󠁬󠁿 Flag for Haut-Lomami (CD-HL) 🏴󠁣󠁤󠁨󠁫󠁿 Flag for Haut-Katanga (CD-HK) 🏴󠁣󠁤󠁫󠁥󠁿 Flag for Kasaï-Oriental (CD-KE) 🏴󠁣󠁤󠁳󠁫󠁿 Flag for South Kivu (CD-SK) 🏴󠁣󠁡󠁯󠁮󠁿 Flag for Ontario (CA-ON) 🏴󠁣󠁦󠁡󠁣󠁿 Flag for Ouham (CF-AC) 🏴󠁣󠁦󠁨󠁳󠁿 Flag for Mambéré-Kadéï (CF-HS) 🏴󠁣󠁤󠁫󠁣󠁿 Flag for Kasaï Central (CD-KC) 🏴󠁣󠁤󠁮󠁵󠁿 Flag for Nord-Ubangi (CD-NU) 🏴󠁣󠁤󠁫󠁳󠁿 Flag for Kasaï (CD-KS) 🏴󠁣󠁤󠁩󠁴󠁿 Flag for Ituri (CD-IT) 🏴󠁣󠁨󠁢󠁥󠁿 Flag for Bern (CH-BE) 🏴󠁣󠁧󠀲󠁿 Flag for Lékoumou (CG-2) 🏴󠁣󠁨󠁡󠁩󠁿 Flag for Appenzell Innerrhoden (CH-AI) 🏴󠁣󠁦󠁭󠁰󠁿 Flag for Ombella-M’Poko (CF-MP) 👨🏻‍👶🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone 🏴󠁣󠁦󠁫󠁧󠁿 Flag for Kémo (CF-KG) 🏴󠁣󠁧󠀱󠀳󠁿 Flag for Sangha (CG-13) 🏴󠁣󠁨󠁬󠁵󠁿 Flag for Lucerne (CH-LU) 🏴󠁣󠁨󠁧󠁥󠁿 Flag for Geneva (CH-GE) 🏴󠁣󠁨󠁮󠁷󠁿 Flag for Nidwalden (CH-NW) 🏴󠁣󠁧󠀵󠁿 Flag for Kouilou (CG-5) 🏴󠁣󠁧󠀷󠁿 Flag for Likouala (CG-7) 🏴󠁣󠁧󠁢󠁺󠁶󠁿 Flag for Brazzaville (CG-BZV) 🏴󠁣󠁨󠁳󠁨󠁿 Flag for Schaffhausen (CH-SH) 🏴󠁣󠁤󠁬󠁯󠁿 Flag for Lomami (CD-LO) 🏴󠁣󠁨󠁡󠁲󠁿 Flag for Appenzell Ausserrhoden (CH-AR) 🏴󠁣󠁨󠁳󠁺󠁿 Flag for Schwyz (CH-SZ) 🏴󠁣󠁨󠁮󠁥󠁿 Flag for Neuchâtel (CH-NE) 🏴󠁣󠁦󠁯󠁰󠁿 Flag for Ouham-Pendé (CF-OP) 🏴󠁣󠁨󠁧󠁲󠁿 Flag for Graubünden (CH-GR) 🏴󠁣󠁨󠁳󠁯󠁿 Flag for Solothurn (CH-SO) 🏴󠁣󠁨󠁦󠁲󠁿 Flag for Fribourg (CH-FR) 🏴󠁣󠁧󠀱󠀴󠁿 Flag for Plateaux (CG-14) 🏴󠁣󠁦󠁳󠁥󠁿 Flag for Sangha-Mbaéré (CF-SE) 👨🏿‍👧🏿‍👶🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁣󠁨󠁡󠁧󠁿 Flag for Aargau (CH-AG) 🏴󠁣󠁧󠀱󠀵󠁿 Flag for Cuvette-Ouest (CG-15) 🏴󠁣󠁨󠁳󠁧󠁿 Flag for St. 
Gallen (CH-SG) 🏴󠁣󠁧󠀸󠁿 Flag for Cuvette (CG-8) 🏴󠁣󠁨󠁯󠁷󠁿 Flag for Obwalden (CH-OW) 🏴󠁣󠁨󠁢󠁳󠁿 Flag for Basel-Stadt (CH-BS) 🏴󠁣󠁦󠁬󠁢󠁿 Flag for Lobaye (CF-LB) 🏴󠁣󠁬󠁶󠁳󠁿 Flag for Valparaíso (CL-VS) 🏴󠁣󠁭󠁮󠁷󠁿 Flag for Northwest (CM-NW) 🏴󠁣󠁩󠁤󠁮󠁿 Flag for Denguélé (CI-DN) 🏴󠁣󠁭󠁮󠁯󠁿 Flag for North (CM-NO) 🏴󠁣󠁩󠁹󠁭󠁿 Flag for Yamoussoukro (CI-YM) 🏴󠁣󠁭󠁥󠁳󠁿 Flag for East (CM-ES) 👨🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁣󠁩󠁷󠁲󠁿 Flag for Woroba (CI-WR) 🏴󠁣󠁩󠁬󠁧󠁿 Flag for Lagunes (CI-LG) 🏴󠁣󠁩󠁧󠁤󠁿 Flag for Gôh-Djiboua (CI-GD) 🏴󠁣󠁩󠁣󠁭󠁿 Flag for Comoé (CI-CM) 🏴󠁣󠁭󠁳󠁷󠁿 Flag for Southwest (CM-SW) 🏴󠁣󠁬󠁢󠁩󠁿 Flag for Bío Bío (CL-BI) 🏴󠁣󠁬󠁡󠁩󠁿 Flag for Aysén (CL-AI) 🏴󠁣󠁬󠁲󠁭󠁿 Flag for Santiago Metropolitan (CL-RM) 🏴󠁣󠁬󠁴󠁡󠁿 Flag for Tarapacá (CL-TA) 🏴󠁣󠁭󠁳󠁵󠁿 Flag for South (CM-SU) 🏴󠁣󠁬󠁡󠁴󠁿 Flag for Atacama (CL-AT) 🏴󠁣󠁮󠀱󠀲󠁿 Flag for Tianjin (CN-12) 🏴󠁣󠁩󠁬󠁣󠁿 Flag for Lacs (CI-LC) 🏴󠁣󠁬󠁣󠁯󠁿 Flag for Coquimbo (CL-CO) 🏴󠁣󠁬󠁡󠁰󠁿 Flag for Arica y Parinacota (CL-AP) 🏴󠁣󠁭󠁬󠁴󠁿 Flag for Littoral (CM-LT) 🏴󠁣󠁭󠁣󠁥󠁿 Flag for Centre (CM-CE) 🏴󠁣󠁭󠁥󠁮󠁿 Flag for Far North (CM-EN) 🏴󠁣󠁬󠁭󠁡󠁿 Flag for Magallanes Region (CL-MA) 🏴󠁣󠁬󠁭󠁬󠁿 Flag for Maule (CL-ML) 🏴󠁣󠁩󠁭󠁧󠁿 Flag for Montagnes (CI-MG) 🏴󠁣󠁩󠁢󠁳󠁿 Flag for Bas-Sassandra (CI-BS) 🏴󠁣󠁭󠁡󠁤󠁿 Flag for Adamawa (CM-AD) 🏴󠁣󠁬󠁬󠁲󠁿 Flag for Los Ríos (CL-LR) 🏴󠁣󠁭󠁯󠁵󠁿 Flag for West (CM-OU) 🏴󠁣󠁩󠁳󠁶󠁿 Flag for Savanes (CI-SV) 🏴󠁣󠁬󠁬󠁬󠁿 Flag for Los Lagos (CL-LL) 🏴󠁣󠁮󠀳󠀷󠁿 Flag for Shandong (CN-37) 🏴󠁣󠁮󠀶󠀲󠁿 Flag for Gansu (CN-62) 🏴󠁣󠁮󠀳󠀱󠁿 Flag for Shanghai (CN-31) 🏴󠁣󠁮󠀳󠀶󠁿 Flag for Jiangxi (CN-36) 🏴󠁣󠁮󠀷󠀱󠁿 Flag for Taiwan (CN-71) 🏴󠁣󠁯󠁢󠁯󠁹󠁿 Flag for Boyacá (CO-BOY) 🏴󠁣󠁮󠀱󠀱󠁿 Flag for Beijing (CN-11) 🏴󠁢󠁧󠀱󠀸󠁿 Flag for Ruse (BG-18) 🏴󠁣󠁮󠀴󠀴󠁿 Flag for Guangdong (CN-44) 🏴󠁣󠁮󠀶󠀳󠁿 Flag for Qinghai (CN-63) 🏴󠁣󠁮󠀲󠀳󠁿 Flag for Heilongjiang (CN-23) 🏴󠁣󠁮󠀵󠀱󠁿 Flag for Sichuan (CN-51) 🏴󠁣󠁯󠁣󠁡󠁬󠁿 Flag for Caldas (CO-CAL) 🏴󠁣󠁯󠁢󠁯󠁬󠁿 Flag for Bolívar (CO-BOL) 🏴󠁣󠁮󠀵󠀳󠁿 Flag for Yunnan (CN-53) 🏴󠁣󠁯󠁡󠁴󠁬󠁿 Flag for Atlántico (CO-ATL) 🏴󠁣󠁮󠀴󠀲󠁿 Flag for Hubei (CN-42) 🏴󠁣󠁮󠀲󠀲󠁿 Flag for Jilin (CN-22) 🏴󠁣󠁯󠁣󠁡󠁱󠁿 Flag for Caquetá (CO-CAQ) 🏴󠁣󠁮󠀳󠀳󠁿 Flag for Zhejiang (CN-33) 🏴󠁣󠁮󠀱󠀳󠁿 Flag for Hebei (CN-13) 
🏴󠁣󠁮󠀱󠀵󠁿 Flag for Inner Mongolia (CN-15) 🏴󠁣󠁮󠀴󠀳󠁿 Flag for Hunan (CN-43) 🏴󠁣󠁦󠁨󠁫󠁿 Flag for Haute-Kotto (CF-HK) 🏴󠁣󠁮󠀶󠀵󠁿 Flag for Xinjiang (CN-65) 🏴󠁣󠁮󠀵󠀰󠁿 Flag for Chongqing (CN-50) 🏴󠁣󠁮󠀴󠀵󠁿 Flag for Guangxi (CN-45) 🏴󠁣󠁮󠀵󠀴󠁿 Flag for Tibet (CN-54) 🏴󠁣󠁮󠀳󠀲󠁿 Flag for Jiangsu (CN-32) 🏴󠁣󠁯󠁡󠁲󠁡󠁿 Flag for Arauca (CO-ARA) 🏴󠁣󠁮󠀳󠀵󠁿 Flag for Fujian (CN-35) 🏴󠁣󠁮󠀴󠀱󠁿 Flag for Henan (CN-41) 🏴󠁣󠁮󠀴󠀶󠁿 Flag for Hainan (CN-46) 🏴󠁣󠁮󠀱󠀴󠁿 Flag for Shanxi (CN-14) 🏴󠁣󠁯󠁭󠁡󠁧󠁿 Flag for Magdalena (CO-MAG) 🏴󠁣󠁯󠁣󠁨󠁯󠁿 Flag for Chocó (CO-CHO) 🏴󠁣󠁯󠁧󠁵󠁡󠁿 Flag for Guainía (CO-GUA) 🏴󠁣󠁯󠁣󠁯󠁲󠁿 Flag for Córdoba (CO-COR) 🏴󠁣󠁯󠁰󠁵󠁴󠁿 Flag for Putumayo (CO-PUT) 🏴󠁣󠁯󠁳󠁡󠁮󠁿 Flag for Santander (CO-SAN) 🏴󠁣󠁵󠀰󠀵󠁿 Flag for Villa Clara (CU-05) 🏴󠁣󠁯󠁶󠁡󠁣󠁿 Flag for Valle del Cauca (CO-VAC) 🏴󠁣󠁯󠁱󠁵󠁩󠁿 Flag for Quindío (CO-QUI) 🏴󠁣󠁯󠁲󠁩󠁳󠁿 Flag for Risaralda (CO-RIS) 🏴󠁣󠁯󠁣󠁵󠁮󠁿 Flag for Cundinamarca (CO-CUN) 👨🏽‍👶🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁣󠁲󠁡󠁿 Flag for Alajuela (CR-A) 🏴󠁣󠁲󠁰󠁿 Flag for Puntarenas (CR-P) 🏴󠁣󠁯󠁨󠁵󠁩󠁿 Flag for Huila (CO-HUI) 🏴󠁣󠁯󠁶󠁡󠁵󠁿 Flag for Vaupés (CO-VAU) 🏴󠁣󠁯󠁣󠁡󠁵󠁿 Flag for Cauca (CO-CAU) 🏴󠁣󠁵󠀰󠀷󠁿 Flag for Sancti Spíritus (CU-07) 🏴󠁣󠁲󠁬󠁿 Flag for Limón (CR-L) 🏴󠁣󠁯󠁮󠁳󠁡󠁿 Flag for Norte de Santander (CO-NSA) 🏴󠁣󠁵󠀰󠀴󠁿 Flag for Matanzas (CU-04) 🏴󠁣󠁲󠁧󠁿 Flag for Guanacaste (CR-G) 🏴󠁣󠁵󠀰󠀳󠁿 Flag for Havana (CU-03) 👩🏾‍❤️‍💋‍👨 Kiss - Woman: Medium-Dark Skin Tone, Man 🏴󠁣󠁵󠀰󠀸󠁿 Flag for Ciego de Ávila (CU-08) 🏴󠁣󠁯󠁴󠁯󠁬󠁿 Flag for Tolima (CO-TOL) 🏴󠁣󠁵󠀰󠀹󠁿 Flag for Camagüey (CU-09) 🏴󠁣󠁵󠀰󠀶󠁿 Flag for Cienfuegos (CU-06) 🏴󠁣󠁯󠁧󠁵󠁶󠁿 Flag for Guaviare (CO-GUV) 🏴󠁢󠁺󠁣󠁹󠁿 Flag for Cayo (BZ-CY) 🏴󠁥󠁴󠁳󠁮󠁿 Flag for Southern Nations, Nationalities, and Peoples (ET-SN) 🏴󠁣󠁵󠀰󠀱󠁿 Flag for Pinar del Río (CU-01) 🏴󠁣󠁲󠁳󠁪󠁿 Flag for San José (CR-SJ) 🏴󠁣󠁲󠁣󠁿 Flag for Cartago (CR-C) 🏴󠁣󠁯󠁬󠁡󠁧󠁿 Flag for La Guajira (CO-LAG) 🏴󠁣󠁹󠀰󠀲󠁿 Flag for Limassol (CY-02) 🏴󠁤󠁥󠁮󠁩󠁿 Flag for Lower Saxony (DE-NI) 🏴󠁢󠁺󠁯󠁷󠁿 Flag for Orange Walk (BZ-OW) 🏴󠁣󠁺󠀶󠀳󠁿 Flag for Kraj Vysočina (CZ-63) 🏴󠁣󠁺󠀵󠀱󠁿 Flag for Liberecký kraj (CZ-51) 🏴󠁣󠁵󠀱󠀰󠁿 Flag for Las Tunas (CU-10) 🏴󠁣󠁵󠀱󠀳󠁿 Flag for Santiago de Cuba (CU-13) 👨🏾‍👶🏾 Family - Man: Medium-Dark 
Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁣󠁹󠀰󠀱󠁿 Flag for Nicosia (CY-01) 🏴󠁣󠁺󠀲󠀰󠁿 Flag for Středočeský kraj (CZ-20) 🏴󠁣󠁦󠁶󠁫󠁿 Flag for Vakaga (CF-VK) 🏴󠁣󠁺󠀵󠀲󠁿 Flag for Královéhradecký kraj (CZ-52) 🏴󠁣󠁺󠀴󠀱󠁿 Flag for Karlovarský kraj (CZ-41) 🏴󠁣󠁵󠀱󠀵󠁿 Flag for Artemisa (CU-15) 🏴󠁣󠁹󠀰󠀴󠁿 Flag for Famagusta (CY-04) 🏴󠁤󠁥󠁨󠁢󠁿 Flag for Bremen (DE-HB) 🏴󠁤󠁥󠁨󠁥󠁿 Flag for Hesse (DE-HE) 🏴󠁣󠁵󠀱󠀱󠁿 Flag for Holguín (CU-11) 👨🏿‍👶🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁣󠁺󠀸󠀰󠁿 Flag for Moravskoslezský kraj (CZ-80) 🏴󠁣󠁺󠀳󠀱󠁿 Flag for Jihočeský kraj (CZ-31) 🏴󠁣󠁨󠁧󠁬󠁿 Flag for Glarus (CH-GL) 🏴󠁣󠁺󠀱󠀰󠁿 Flag for Praha, Hlavní mešto (CZ-10) 🏴󠁣󠁹󠀰󠀳󠁿 Flag for Larnaca (CY-03) 🏴󠁤󠁥󠁨󠁨󠁿 Flag for Hamburg (DE-HH) 🏴󠁤󠁥󠁭󠁶󠁿 Flag for Mecklenburg-Vorpommern (DE-MV) 🏴󠁣󠁶󠁢󠁿 Flag for Barlavento Islands (CV-B) 🏴󠁣󠁶󠁳󠁿 Flag for Sotavento Islands (CV-S) 🏴󠁣󠁵󠀱󠀶󠁿 Flag for Mayabeque (CU-16) 🏴󠁣󠁺󠀷󠀱󠁿 Flag for Olomoucký kraj (CZ-71) 🏴󠁣󠁵󠀱󠀴󠁿 Flag for Guantánamo (CU-14) 🏴󠁤󠁥󠁢󠁢󠁿 Flag for Brandenburg (DE-BB) 🏴󠁣󠁺󠀳󠀲󠁿 Flag for Plzeňský kraj (CZ-32) 🏴󠁤󠁪󠁡󠁳󠁿 Flag for Ali Sabieh (DJ-AS) 🏴󠁤󠁥󠁲󠁰󠁿 Flag for Rhineland-Palatinate (DE-RP) 🏴󠁤󠁥󠁳󠁮󠁿 Flag for Saxony (DE-SN) 🏴󠁤󠁫󠀸󠀵󠁿 Flag for Zealand (DK-85) 🏴󠁤󠁥󠁳󠁴󠁿 Flag for Saxony-Anhalt (DE-ST) 🏴󠁤󠁺󠀰󠀲󠁿 Flag for Chlef (DZ-02) 🏴󠁤󠁭󠀰󠀷󠁿 Flag for Saint Luke (DM-07) 🏴󠁤󠁪󠁡󠁲󠁿 Flag for Arta (DJ-AR) 🏴󠁤󠁫󠀸󠀴󠁿 Flag for Capital Region (DK-84) 🏴󠁤󠁭󠀱󠀰󠁿 Flag for Saint Paul (DM-10) 🏴󠁤󠁯󠀳󠀶󠁿 Flag for Cibao Sur (DO-36) 🏴󠁤󠁯󠀳󠀸󠁿 Flag for Enriquillo (DO-38) 🏴󠁤󠁭󠀰󠀹󠁿 Flag for Saint Patrick (DM-09) 🏴󠁤󠁯󠀳󠀴󠁿 Flag for Cibao Noroeste (DO-34) 🏴󠁤󠁯󠀳󠀳󠁿 Flag for Cibao Nordeste (DO-33) 🏴󠁤󠁭󠀰󠀵󠁿 Flag for Saint John (DM-05) 🏴󠁤󠁯󠀴󠀲󠁿 Flag for Yuma (DO-42) 🏴󠁤󠁪󠁯󠁢󠁿 Flag for Obock (DJ-OB) 🏴󠁤󠁥󠁴󠁨󠁿 Flag for Thuringia (DE-TH) 🏴󠁤󠁯󠀴󠀰󠁿 Flag for Ozama (DO-40) 🏴󠁤󠁥󠁳󠁬󠁿 Flag for Saarland (DE-SL) 🏴󠁤󠁭󠀰󠀴󠁿 Flag for Saint George (DM-04) 🏴󠁤󠁭󠀰󠀳󠁿 Flag for Saint David (DM-03) 🏴󠁤󠁭󠀰󠀲󠁿 Flag for Saint Andrew (DM-02) 🏴󠁤󠁪󠁤󠁩󠁿 Flag for Dikhil (DJ-DI) 🏴󠁤󠁭󠀰󠀸󠁿 Flag for Saint Mark (DM-08) 🏴󠁤󠁪󠁴󠁡󠁿 Flag for Tadjourah (DJ-TA) 🏴󠁤󠁭󠀱󠀱󠁿 Flag for Saint Peter (DM-11) 🏴󠁤󠁯󠀴󠀱󠁿 Flag for Valdesia (DO-41) 🏴󠁤󠁯󠀳󠀹󠁿 Flag for 
Higüamo (DO-39) 🏴󠁤󠁺󠀰󠀳󠁿 Flag for Laghouat (DZ-03) 🏴󠁤󠁺󠀲󠀸󠁿 Flag for M’Sila (DZ-28) 🏴󠁤󠁺󠀳󠀳󠁿 Flag for Illizi (DZ-33) 👩🏿‍👨🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁤󠁺󠀱󠀵󠁿 Flag for Tizi Ouzou (DZ-15) 🏴󠁤󠁺󠀱󠀴󠁿 Flag for Tiaret (DZ-14) 🏴󠁤󠁺󠀱󠀹󠁿 Flag for Sétif (DZ-19) 🏴󠁤󠁺󠀱󠀷󠁿 Flag for Djelfa (DZ-17) 🏴󠁤󠁺󠀲󠀵󠁿 Flag for Constantine (DZ-25) 🏴󠁤󠁺󠀲󠀴󠁿 Flag for Guelma (DZ-24) 🏴󠁤󠁺󠀴󠀲󠁿 Flag for Tipasa (DZ-42) 🏴󠁤󠁺󠀰󠀵󠁿 Flag for Batna (DZ-05) 🏴󠁤󠁺󠀱󠀲󠁿 Flag for Tébessa (DZ-12) 🏴󠁤󠁺󠀰󠀷󠁿 Flag for Biskra (DZ-07) 🏴󠁤󠁺󠀳󠀰󠁿 Flag for Ouargla (DZ-30) 🏴󠁤󠁺󠀲󠀲󠁿 Flag for Sidi Bel Abbès (DZ-22) 🏴󠁤󠁺󠀱󠀱󠁿 Flag for Tamanghasset (DZ-11) 🏴󠁤󠁺󠀲󠀶󠁿 Flag for Médéa (DZ-26) 🏴󠁤󠁺󠀳󠀲󠁿 Flag for El Bayadh (DZ-32) 🏴󠁤󠁺󠀴󠀰󠁿 Flag for Khenchela (DZ-40) 🏴󠁤󠁺󠀳󠀸󠁿 Flag for Tissemsilt (DZ-38) 🏴󠁤󠁺󠀳󠀹󠁿 Flag for El Oued (DZ-39) 🏴󠁤󠁺󠀴󠀱󠁿 Flag for Souk Ahras (DZ-41) 🏴󠁤󠁺󠀱󠀳󠁿 Flag for Tlemcen (DZ-13) 🏴󠁤󠁺󠀰󠀶󠁿 Flag for Béjaïa (DZ-06) 🏴󠁤󠁺󠀴󠀳󠁿 Flag for Mila (DZ-43) 🏴󠁤󠁺󠀲󠀰󠁿 Flag for Saïda (DZ-20) 🏴󠁤󠁺󠀳󠀱󠁿 Flag for Oran (DZ-31) 🏴󠁤󠁺󠀱󠀰󠁿 Flag for Bouira (DZ-10) 🏴󠁤󠁺󠀳󠀵󠁿 Flag for Boumerdès (DZ-35) 🏴󠁤󠁺󠀳󠀶󠁿 Flag for El Tarf (DZ-36) 🏴󠁤󠁺󠀱󠀶󠁿 Flag for Algiers (DZ-16) 🏴󠁤󠁺󠀳󠀷󠁿 Flag for Tindouf (DZ-37) 🏴󠁤󠁺󠀲󠀳󠁿 Flag for Annaba (DZ-23) 🏴󠁤󠁺󠀰󠀹󠁿 Flag for Blida (DZ-09) 🏴󠁤󠁺󠀰󠀴󠁿 Flag for Oum El Bouaghi (DZ-04) 🏴󠁤󠁺󠀲󠀷󠁿 Flag for Mostaganem (DZ-27) 🏴󠁥󠁣󠁨󠁿 Flag for Chimborazo (EC-H) 🏴󠁤󠁺󠀴󠀷󠁿 Flag for Ghardaïa (DZ-47) 🏴󠁥󠁣󠁢󠁿 Flag for Bolívar (EC-B) 🏴󠁥󠁣󠁣󠁿 Flag for Carchi (EC-C) 🏴󠁤󠁺󠀴󠀴󠁿 Flag for Aïn Defla (DZ-44) 🏴󠁣󠁹󠀰󠀵󠁿 Flag for Paphos (CY-05) 🏴󠁤󠁺󠀴󠀸󠁿 Flag for Relizane (DZ-48) 🏴󠁥󠁣󠁳󠁿 Flag for Morona-Santiago (EC-S) 🏴󠁣󠁨󠁪󠁵󠁿 Flag for Jura (CH-JU) 🏴󠁥󠁣󠁳󠁥󠁿 Flag for Santa Elena (EC-SE) 🏴󠁥󠁥󠀵󠀷󠁿 Flag for Lääne (EE-57) 🏴󠁥󠁣󠁩󠁿 Flag for Imbabura (EC-I) 🏴󠁤󠁺󠀴󠀶󠁿 Flag for Aïn Témouchent (DZ-46) 🏴󠁥󠁣󠁷󠁿 Flag for Galápagos (EC-W) 🏴󠁥󠁣󠁮󠁿 Flag for Napo (EC-N) 👨🏽‍👶🏽‍👦🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁥󠁥󠀶󠀷󠁿 Flag for Pärnu (EE-67) 🏴󠁥󠁥󠀷󠀸󠁿 Flag for Tartu (EE-78) 🏴󠁥󠁣󠁡󠁿 Flag for Azuay (EC-A) 🏴󠁥󠁣󠁭󠁿 Flag for Manabí (EC-M) 🏴󠁥󠁣󠁯󠁿 Flag for El Oro (EC-O) 🏴󠁥󠁣󠁰󠁿 Flag for 
Pichincha (EC-P) 🏴󠁥󠁥󠀷󠀰󠁿 Flag for Rapla (EE-70) 🏴󠁥󠁥󠀷󠀴󠁿 Flag for Saare (EE-74) 👨🏾‍👶🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁥󠁥󠀶󠀵󠁿 Flag for Põlva (EE-65) 🏴󠁥󠁣󠁹󠁿 Flag for Pastaza (EC-Y) 🏴󠁥󠁣󠁧󠁿 Flag for Guayas (EC-G) 🏴󠁥󠁣󠁲󠁿 Flag for Los Ríos (EC-R) 🏴󠁥󠁣󠁵󠁿 Flag for Sucumbíos (EC-U) 🏴󠁥󠁥󠀴󠀹󠁿 Flag for Jõgeva (EE-49) 🏴󠁥󠁥󠀸󠀲󠁿 Flag for Valga (EE-82) 🏴󠁥󠁣󠁬󠁿 Flag for Loja (EC-L) 🏴󠁥󠁣󠁤󠁿 Flag for Orellana (EC-D) 👨🏼‍👶🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁤󠁺󠀴󠀵󠁿 Flag for Naama (DZ-45) 🏴󠁥󠁥󠀵󠀱󠁿 Flag for Järva (EE-51) 🏴󠁥󠁧󠁳󠁩󠁮󠁿 Flag for North Sinai (EG-SIN) 🏴󠁥󠁧󠁪󠁳󠁿 Flag for South Sinai (EG-JS) 🏴󠁥󠁧󠁫󠁮󠁿 Flag for Qena (EG-KN) 🏴󠁥󠁥󠀸󠀴󠁿 Flag for Viljandi (EE-84) 🏴󠁥󠁧󠁩󠁳󠁿 Flag for Ismailia (EG-IS) 🏴󠁥󠁧󠁡󠁳󠁮󠁿 Flag for Aswan (EG-ASN) 🏴󠁥󠁧󠁤󠁫󠁿 Flag for Dakahlia (EG-DK) 🏴󠁥󠁧󠁧󠁨󠁿 Flag for Gharbia (EG-GH) 🏴󠁥󠁧󠁢󠁨󠁿 Flag for Beheira (EG-BH) 🏴󠁥󠁥󠀸󠀶󠁿 Flag for Võru (EE-86) 🏴󠁥󠁧󠁡󠁳󠁴󠁿 Flag for Asyut (EG-AST) 🏴󠁥󠁧󠁫󠁢󠁿 Flag for Qalyubia (EG-KB) 🏴󠁥󠁧󠁧󠁺󠁿 Flag for Giza (EG-GZ) 👨🏿‍👶🏿‍👦🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁥󠁲󠁡󠁮󠁿 Flag for Anseba (ER-AN) 🏴󠁥󠁧󠁫󠁦󠁳󠁿 Flag for Kafr el-Sheikh (EG-KFS) 🏴󠁥󠁧󠁭󠁴󠁿 Flag for Matrouh (EG-MT) 🏴󠁥󠁲󠁧󠁢󠁿 Flag for Gash-Barka (ER-GB) 🏴󠁥󠁧󠁭󠁮󠁿 Flag for Minya (EG-MN) 🏴󠁥󠁧󠁡󠁬󠁸󠁿 Flag for Alexandria (EG-ALX) 🏴󠁥󠁲󠁤󠁫󠁿 Flag for Southern Red Sea (ER-DK) 🏴󠁥󠁧󠁰󠁴󠁳󠁿 Flag for Port Said (EG-PTS) 🏴󠁥󠁧󠁳󠁨󠁧󠁿 Flag for Sohag (EG-SHG) 🏴󠁥󠁧󠁷󠁡󠁤󠁿 Flag for New Valley (EG-WAD) 🏴󠁥󠁲󠁳󠁫󠁿 Flag for Northern Red Sea (ER-SK) 🏴󠁥󠁧󠁳󠁵󠁺󠁿 Flag for Suez (EG-SUZ) 🏴󠁥󠁧󠁭󠁮󠁦󠁿 Flag for Monufia (EG-MNF) 🏴󠁥󠁧󠁬󠁸󠁿 Flag for Luxor (EG-LX) 🏴󠁥󠁲󠁭󠁡󠁿 Flag for Maekel (ER-MA) 🏴󠁥󠁧󠁤󠁴󠁿 Flag for Damietta (EG-DT) 🏴󠁥󠁧󠁳󠁨󠁲󠁿 Flag for Al Sharqia (EG-SHR) 🏴󠁥󠁧󠁦󠁹󠁭󠁿 Flag for Faiyum (EG-FYM) 🏴󠁥󠁲󠁤󠁵󠁿 Flag for Debub (ER-DU) 🏴󠁥󠁳󠁡󠁲󠁿 Flag for Aragon (ES-AR) 🏴󠁣󠁮󠀳󠀴󠁿 Flag for Anhui (CN-34) 🏴󠁤󠁫󠀸󠀱󠁿 Flag for Northern Denmark (DK-81) 👨🏻‍👶🏻‍👧🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👨🏼‍👶🏼‍👧🏼 Family - Man: Medium-Light Skin 
Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👶🏽‍👧🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁥󠁴󠁴󠁩󠁿 Flag for Tigray (ET-TI) 🏴󠁣󠁮󠀲󠀱󠁿 Flag for Liaoning (CN-21) 🏴󠁥󠁴󠁧󠁡󠁿 Flag for Gambela (ET-GA) 🏴󠁥󠁳󠁭󠁬󠁿 Flag for Melilla (ES-ML) 🏴󠁥󠁳󠁭󠁣󠁿 Flag for Murcia Region (ES-MC) 🏴󠁦󠁩󠀱󠀰󠁿 Flag for Lapland (FI-10) 🏴󠁦󠁩󠀰󠀷󠁿 Flag for Central Ostrobothnia (FI-07) 🏴󠁥󠁴󠁡󠁭󠁿 Flag for Amhara (ET-AM) 🏴󠁥󠁴󠁢󠁥󠁿 Flag for Benishangul-Gumuz (ET-BE) 🏴󠁥󠁴󠁯󠁲󠁿 Flag for Oromia (ET-OR) 🏴󠁥󠁳󠁲󠁩󠁿 Flag for La Rioja (ES-RI) 🏴󠁤󠁪󠁤󠁪󠁿 Flag for Djibouti (DJ-DJ) 🏴󠁥󠁳󠁭󠁤󠁿 Flag for Madrid Autonomous Community (ES-MD) 🏴󠁥󠁴󠁤󠁤󠁿 Flag for Dire Dawa (ET-DD) 🏴󠁤󠁺󠀲󠀹󠁿 Flag for Mascara (DZ-29) 🏴󠁦󠁩󠀰󠀵󠁿 Flag for Kainuu (FI-05) 🏴󠁦󠁩󠀰󠀹󠁿 Flag for Kymenlaakso (FI-09) 🏴󠁦󠁩󠀰󠀳󠁿 Flag for Southern Ostrobothnia (FI-03) 🏴󠁦󠁩󠀱󠀱󠁿 Flag for Pirkanmaa (FI-11) 🏴󠁦󠁩󠀰󠀴󠁿 Flag for Southern Savonia (FI-04) 🏴󠁦󠁩󠀱󠀳󠁿 Flag for North Karelia (FI-13) 🏴󠁦󠁩󠀰󠀲󠁿 Flag for South Karelia (FI-02) 🏴󠁥󠁴󠁨󠁡󠁿 Flag for Harari (ET-HA) 🏴󠁣󠁺󠀷󠀲󠁿 Flag for Zlínský kraj (CZ-72) 🏴󠁥󠁴󠁳󠁯󠁿 Flag for Somali (ET-SO) 🏴󠁥󠁳󠁣󠁴󠁿 Flag for Catalonia (ES-CT) 🏴󠁦󠁭󠁫󠁳󠁡󠁿 Flag for Kosrae (FM-KSA) 🏴󠁦󠁲󠁮󠁣󠁿 Flag for New Caledonia (FR-NC) 🏴󠁦󠁲󠁯󠁣󠁣󠁿 Flag for Occitanie (FR-OCC) 🏴󠁦󠁲󠁰󠁡󠁣󠁿 Flag for Provence-Alpes-Côte-d’Azur (FR-PAC) 🏴󠁦󠁩󠀱󠀵󠁿 Flag for Northern Savonia (FI-15) 🏴󠁦󠁭󠁴󠁲󠁫󠁿 Flag for Chuuk (FM-TRK) 🏴󠁦󠁲󠁢󠁦󠁣󠁿 Flag for Bourgogne-Franche-Comté (FR-BFC) 🏴󠁦󠁩󠀱󠀴󠁿 Flag for Northern Ostrobothnia (FI-14) 🏴󠁦󠁪󠁲󠁿 Flag for Rotuma (FJ-R) 🏴󠁦󠁲󠁭󠁡󠁹󠁿 Flag for Mayotte (FR-MAY) 🏴󠁦󠁲󠁮󠁡󠁱󠁿 Flag for Nouvelle-Aquitaine (FR-NAQ) 🏴󠁦󠁪󠁣󠁿 Flag for Central (FJ-C) 🏴󠁦󠁲󠁧󠁥󠁳󠁿 Flag for Grand-Est (FR-GES) 🏴󠁦󠁪󠁮󠁿 Flag for Northern (FJ-N) 🏴󠁦󠁲󠁧󠁵󠁡󠁿 Flag for Guadeloupe (FR-GUA) 🏴󠁦󠁭󠁹󠁡󠁰󠁿 Flag for Yap (FM-YAP) 🏴󠁦󠁲󠁢󠁲󠁥󠁿 Flag for Bretagne (FR-BRE) 🏴󠁦󠁲󠁰󠁦󠁿 Flag for French Polynesia (FR-PF) 🏴󠁦󠁲󠁮󠁯󠁲󠁿 Flag for Normandie (FR-NOR) 🏴󠁦󠁲󠁧󠁦󠁿 Flag for French Guiana (FR-GF) 🏴󠁦󠁲󠁣󠁶󠁬󠁿 Flag for Centre-Val de Loire (FR-CVL) 🏴󠁦󠁲󠁣󠁰󠁿 Flag for Clipperton Island (FR-CP) 🏴󠁦󠁲󠁭󠁦󠁿 Flag for St. 
Martin (FR-MF) 🏴󠁦󠁩󠀱󠀶󠁿 Flag for Päijänne Tavastia (FI-16) 🏴󠁦󠁩󠀱󠀹󠁿 Flag for Southwest Finland (FI-19) 🏴󠁦󠁲󠁬󠁲󠁥󠁿 Flag for La Réunion (FR-LRE) 🏴󠁦󠁩󠀱󠀷󠁿 Flag for Satakunta (FI-17) 🏴󠁧󠁥󠁳󠁫󠁿 Flag for Shida Kartli (GE-SK) 🏴󠁧󠁡󠀳󠁿 Flag for Moyen-Ogooué (GA-3) 👨🏿‍👶🏿‍👧🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁧󠁤󠀰󠀳󠁿 Flag for Saint George (GD-03) 🏴󠁧󠁡󠀵󠁿 Flag for Nyanga (GA-5) 🏴󠁧󠁡󠀶󠁿 Flag for Ogooué-Ivindo (GA-6) 🏴󠁧󠁨󠁢󠁡󠁿 Flag for Brong-Ahafo (GH-BA) 🏴󠁧󠁡󠀲󠁿 Flag for Haut-Ogooué (GA-2) 🏴󠁧󠁤󠀰󠀱󠁿 Flag for Saint Andrew (GD-01) 🏴󠁧󠁤󠀰󠀶󠁿 Flag for Saint Patrick (GD-06) 🏴󠁥󠁳󠁧󠁡󠁿 Flag for Galicia (ES-GA) 🏴󠁦󠁲󠁷󠁦󠁿 Flag for Wallis & Futuna (FR-WF) 👨🏻‍👶🏻‍👶🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 🏴󠁦󠁲󠁰󠁭󠁿 Flag for St. Pierre & Miquelon (FR-PM) 🏴󠁧󠁤󠀰󠀴󠁿 Flag for Saint John (GD-04) 🏴󠁧󠁥󠁴󠁢󠁿 Flag for Tbilisi (GE-TB) 👨🏼‍👶🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁧󠁤󠀰󠀲󠁿 Flag for Saint David (GD-02) 🏴󠁧󠁥󠁧󠁵󠁿 Flag for Guria (GE-GU) 🏴󠁧󠁡󠀹󠁿 Flag for Woleu-Ntem (GA-9) 🏴󠁧󠁥󠁲󠁬󠁿 Flag for Racha-Lechkhumi and Kvemo Svaneti (GE-RL) 🏴󠁧󠁥󠁳󠁪󠁿 Flag for Samtskhe-Javakheti (GE-SJ) 🏴󠁧󠁥󠁭󠁭󠁿 Flag for Mtskheta-Mtianeti (GE-MM) 🏴󠁧󠁥󠁩󠁭󠁿 Flag for Imereti (GE-IM) 🏴󠁧󠁡󠀸󠁿 Flag for Ogooué-Maritime (GA-8) 🏴󠁣󠁮󠀶󠀱󠁿 Flag for Shaanxi (CN-61) 🏴󠁧󠁨󠁡󠁡󠁿 Flag for Greater Accra (GH-AA) 🏴󠁣󠁺󠀶󠀴󠁿 Flag for Jihomoravský kraj (CZ-64) 🏴󠁧󠁥󠁡󠁪󠁿 Flag for Adjara (GE-AJ) 🏴󠁧󠁥󠁳󠁺󠁿 Flag for Samegrelo-Zemo Svaneti (GE-SZ) 🏴󠁧󠁡󠀱󠁿 Flag for Estuaire (GA-1) 🏴󠁧󠁡󠀷󠁿 Flag for Ogooué-Lolo (GA-7) 🏴󠁧󠁮󠁤󠁿 Flag for Kindia Region (GN-D) 🏴󠁧󠁮󠁭󠁿 Flag for Mamou Region (GN-M) 👨🏽‍👶🏽‍👶🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁧󠁬󠁱󠁡󠁿 Flag for Qaasuitsup (GL-QA) 🏴󠁧󠁭󠁮󠁿 Flag for North Bank Division (GM-N) 🏴󠁧󠁬󠁳󠁭󠁿 Flag for Sermersooq (GL-SM) 🏴󠁧󠁨󠁮󠁰󠁿 Flag for Northern (GH-NP) 🏴󠁧󠁲󠁦󠁿 Flag for Ionian Islands (GR-F) 🏴󠁧󠁲󠁨󠁿 Flag for Central Greece (GR-H) 🏴󠁧󠁨󠁣󠁰󠁿 Flag for Central (GH-CP) 🏴󠁧󠁮󠁫󠁿 Flag for Kankan Region (GN-K) 🏴󠁧󠁲󠁬󠁿 Flag for South Aegean (GR-L) 
🏴󠁧󠁲󠁩󠁿 Flag for Attica (GR-I) 🏴󠁧󠁭󠁵󠁿 Flag for Upper River Division (GM-U) 🏴󠁧󠁨󠁥󠁰󠁿 Flag for Eastern (GH-EP) 🏴󠁧󠁮󠁮󠁿 Flag for Nzérékoré Region (GN-N) 🏴󠁧󠁨󠁷󠁰󠁿 Flag for Western (GH-WP) 🏴󠁧󠁲󠁣󠁿 Flag for West Macedonia (GR-C) 🏴󠁧󠁱󠁣󠁿 Flag for Río Muni (GQ-C) 🏴󠁧󠁭󠁬󠁿 Flag for Lower River Division (GM-L) 🏴󠁧󠁨󠁵󠁥󠁿 Flag for Upper East (GH-UE) 🏴󠁧󠁮󠁣󠁿 Flag for Conakry (GN-C) 🏴󠁧󠁲󠁢󠁿 Flag for Central Macedonia (GR-B) 🏴󠁧󠁭󠁭󠁿 Flag for Central River Division (GM-M) 🏴󠁧󠁨󠁵󠁷󠁿 Flag for Upper West (GH-UW) 🏴󠁧󠁬󠁫󠁵󠁿 Flag for Kujalleq (GL-KU) 🏴󠁧󠁮󠁢󠁿 Flag for Boké Region (GN-B) 🏴󠁧󠁬󠁱󠁥󠁿 Flag for Qeqqata (GL-QE) 🏴󠁧󠁲󠁤󠁿 Flag for Epirus (GR-D) 🏴󠁧󠁨󠁡󠁨󠁿 Flag for Ashanti (GH-AH) 🏴󠁧󠁨󠁴󠁶󠁿 Flag for Volta (GH-TV) 🏴󠁧󠁲󠀶󠀹󠁿 Flag for Mount Athos (GR-69) 🏴󠁧󠁱󠁩󠁿 Flag for Insular (GQ-I) 🏴󠁧󠁭󠁷󠁿 Flag for West Coast Division (GM-W) 🏴󠁧󠁭󠁢󠁿 Flag for Banjul (GM-B) 🏴󠁧󠁮󠁬󠁿 Flag for Labé Region (GN-L) 🏴󠁧󠁲󠁥󠁿 Flag for Thessaly (GR-E) 🏴󠁧󠁮󠁦󠁿 Flag for Faranah Region (GN-F) 🏴󠁧󠁹󠁣󠁵󠁿 Flag for Cuyuni-Mazaruni (GY-CU) 🏴󠁨󠁮󠁡󠁴󠁿 Flag for Atlántida (HN-AT) 👨🏾‍👶🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁧󠁴󠁨󠁵󠁿 Flag for Huehuetenango (GT-HU) 🏴󠁧󠁴󠁡󠁶󠁿 Flag for Alta Verapaz (GT-AV) 🏴󠁧󠁴󠁰󠁲󠁿 Flag for El Progreso (GT-PR) 🏴󠁧󠁷󠁮󠁿 Flag for Norte (GW-N) 🏴󠁧󠁴󠁳󠁵󠁿 Flag for Suchitepéquez (GT-SU) 🏴󠁧󠁹󠁰󠁭󠁿 Flag for Pomeroon-Supenaam (GY-PM) 🏴󠁧󠁴󠁩󠁺󠁿 Flag for Izabal (GT-IZ) 🏴󠁧󠁹󠁰󠁴󠁿 Flag for Potaro-Siparuni (GY-PT) 🏴󠁧󠁴󠁱󠁺󠁿 Flag for Quetzaltenango (GT-QZ) 🏴󠁧󠁴󠁣󠁭󠁿 Flag for Chimaltenango (GT-CM) 🏴󠁥󠁴󠁡󠁡󠁿 Flag for Addis Ababa (ET-AA) 🏴󠁧󠁷󠁢󠁳󠁿 Flag for Bissau (GW-BS) 🏴󠁧󠁴󠁱󠁣󠁿 Flag for Quiché (GT-QC) 🏴󠁧󠁴󠁴󠁯󠁿 Flag for Totonicapán (GT-TO) 🏴󠁧󠁹󠁢󠁡󠁿 Flag for Barima-Waini (GY-BA) 🏴󠁧󠁹󠁥󠁳󠁿 Flag for Essequibo Islands-West Demerara (GY-ES) 👨🏿‍👶🏿‍👶🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁨󠁮󠁣󠁨󠁿 Flag for Choluteca (HN-CH) 🏴󠁧󠁹󠁤󠁥󠁿 Flag for Demerara-Mahaica (GY-DE) 👨🏻‍👨🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone 🏴󠁧󠁴󠁳󠁡󠁿 Flag for Sacatepéquez (GT-SA) 🏴󠁧󠁴󠁪󠁵󠁿 Flag for Jutiapa (GT-JU) 🏴󠁧󠁴󠁣󠁱󠁿 Flag for 
Chiquimula (GT-CQ) 🏴󠁧󠁴󠁢󠁶󠁿 Flag for Baja Verapaz (GT-BV) 🏴󠁧󠁴󠁥󠁳󠁿 Flag for Escuintla (GT-ES) 🏴󠁧󠁴󠁺󠁡󠁿 Flag for Zacapa (GT-ZA) 🏴󠁧󠁷󠁳󠁿 Flag for Sul (GW-S) 🏴󠁧󠁷󠁬󠁿 Flag for Leste (GW-L) 🏴󠁧󠁴󠁪󠁡󠁿 Flag for Jalapa (GT-JA) 🏴󠁧󠁴󠁰󠁥󠁿 Flag for Petén (GT-PE) 🏴󠁧󠁴󠁳󠁯󠁿 Flag for Sololá (GT-SO) 🏴󠁨󠁮󠁣󠁭󠁿 Flag for Comayagua (HN-CM) 🏴󠁨󠁲󠀰󠀶󠁿 Flag for Koprivnica-Križevci (HR-06) 🏴󠁨󠁮󠁣󠁰󠁿 Flag for Copán (HN-CP) 🏴󠁨󠁮󠁩󠁢󠁿 Flag for Bay Islands (HN-IB) 🏴󠁨󠁲󠀰󠀹󠁿 Flag for Lika-Senj (HR-09) 🏴󠁨󠁮󠁳󠁢󠁿 Flag for Santa Bárbara (HN-SB) 🏴󠁨󠁮󠁩󠁮󠁿 Flag for Intibucá (HN-IN) 🏴󠁨󠁮󠁦󠁭󠁿 Flag for Francisco Morazán (HN-FM) 🏴󠁨󠁲󠀰󠀱󠁿 Flag for Zagreb County (HR-01) 🏴󠁨󠁮󠁣󠁬󠁿 Flag for Colón (HN-CL) 🏴󠁨󠁴󠁣󠁥󠁿 Flag for Centre (HT-CE) 🏴󠁨󠁲󠀰󠀸󠁿 Flag for Primorje-Gorski Kotar (HR-08) 🏴󠁨󠁮󠁬󠁥󠁿 Flag for Lempira (HN-LE) 🏴󠁨󠁲󠀱󠀴󠁿 Flag for Osijek-Baranja (HR-14) 🏴󠁨󠁲󠀱󠀲󠁿 Flag for Brod-Posavina (HR-12) 🏴󠁨󠁲󠀱󠀷󠁿 Flag for Split-Dalmatia (HR-17) 🏴󠁨󠁮󠁯󠁬󠁿 Flag for Olancho (HN-OL) 🏴󠁨󠁮󠁬󠁰󠁿 Flag for La Paz (HN-LP) 🏴󠁨󠁲󠀲󠀰󠁿 Flag for Međimurje (HR-20) 🏴󠁨󠁮󠁥󠁰󠁿 Flag for El Paraíso (HN-EP) 🏴󠁨󠁲󠀲󠀱󠁿 Flag for Zagreb (HR-21) 🏴󠁨󠁲󠀱󠀵󠁿 Flag for Šibenik-Knin (HR-15) 🏴󠁥󠁥󠀴󠀴󠁿 Flag for Ida-Viru (EE-44) 🏴󠁨󠁮󠁣󠁲󠁿 Flag for Cortés (HN-CR) 🏴󠁨󠁲󠀰󠀳󠁿 Flag for Sisak-Moslavina (HR-03) 🏴󠁨󠁲󠀱󠀳󠁿 Flag for Zadar (HR-13) 🏴󠁨󠁲󠀱󠀸󠁿 Flag for Istria (HR-18) 🏴󠁨󠁲󠀰󠀲󠁿 Flag for Krapina-Zagorje (HR-02) 🏴󠁨󠁲󠀱󠀶󠁿 Flag for Vukovar-Syrmia (HR-16) 🏴󠁨󠁮󠁹󠁯󠁿 Flag for Yoro (HN-YO) 🏴󠁨󠁴󠁡󠁲󠁿 Flag for Artibonite (HT-AR) 🏴󠁨󠁮󠁧󠁤󠁿 Flag for Gracias a Dios (HN-GD) 🏴󠁨󠁮󠁶󠁡󠁿 Flag for Valle (HN-VA) 🏴󠁤󠁺󠀱󠀸󠁿 Flag for Jijel (DZ-18) 🏴󠁨󠁲󠀱󠀹󠁿 Flag for Dubrovnik-Neretva (HR-19) 🏴󠁨󠁲󠀱󠀱󠁿 Flag for Požega-Slavonia (HR-11) 🏴󠁨󠁲󠀰󠀷󠁿 Flag for Bjelovar-Bilogora (HR-07) 🏴󠁨󠁮󠁯󠁣󠁿 Flag for Ocotepeque (HN-OC) 🏴󠁨󠁵󠁢󠁵󠁿 Flag for Budapest (HU-BU) 🏴󠁨󠁵󠁨󠁶󠁿 Flag for Hódmezővásárhely (HU-HV) 🏴󠁨󠁵󠁦󠁥󠁿 Flag for Fejér (HU-FE) 🏴󠁨󠁵󠁢󠁡󠁿 Flag for Baranya (HU-BA) 🏴󠁨󠁵󠁳󠁦󠁿 Flag for Székesfehérvár (HU-SF) 🏴󠁨󠁵󠁢󠁺󠁿 Flag for Borsod-Abaúj-Zemplén (HU-BZ) 🏴󠁨󠁵󠁣󠁳󠁿 Flag for Csongrád (HU-CS) 🏴󠁨󠁵󠁳󠁮󠁿 Flag for Sopron (HU-SN) 🏴󠁨󠁵󠁤󠁵󠁿 Flag for Dunaújváros (HU-DU) 🏴󠁨󠁵󠁫󠁶󠁿 Flag for Kaposvár (HU-KV) 🏴󠁨󠁵󠁮󠁹󠁿 Flag for Nyíregyháza 
(HU-NY) 🏴󠁨󠁵󠁨󠁢󠁿 Flag for Hajdú-Bihar (HU-HB) 🏴󠁨󠁴󠁯󠁵󠁿 Flag for Ouest (HT-OU) 🏴󠁨󠁵󠁳󠁤󠁿 Flag for Szeged (HU-SD) 🏴󠁨󠁵󠁰󠁥󠁿 Flag for Pest (HU-PE) 🏴󠁨󠁵󠁫󠁥󠁿 Flag for Komárom-Esztergom (HU-KE) 🏴󠁨󠁵󠁮󠁫󠁿 Flag for Nagykanizsa (HU-NK) 🏴󠁨󠁴󠁧󠁡󠁿 Flag for Grand’Anse (HT-GA) 🏴󠁨󠁵󠁢󠁣󠁿 Flag for Békéscsaba (HU-BC) 🏴󠁨󠁴󠁳󠁤󠁿 Flag for Sud (HT-SD) 🏴󠁨󠁴󠁮󠁯󠁿 Flag for Nord-Ouest (HT-NO) 🏴󠁨󠁵󠁨󠁥󠁿 Flag for Heves (HU-HE) 🏴󠁨󠁵󠁢󠁫󠁿 Flag for Bács-Kiskun (HU-BK) 🏴󠁨󠁵󠁭󠁩󠁿 Flag for Miskolc (HU-MI) 🏴󠁨󠁵󠁥󠁲󠁿 Flag for Érd (HU-ER) 👨🏽‍👨🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁨󠁴󠁮󠁩󠁿 Flag for Nippes (HT-NI) 🏴󠁨󠁵󠁳󠁫󠁿 Flag for Szolnok (HU-SK) 🏴󠁨󠁴󠁮󠁤󠁿 Flag for Nord (HT-ND) 🏴󠁨󠁴󠁳󠁥󠁿 Flag for Sud-Est (HT-SE) 🏴󠁨󠁵󠁪󠁮󠁿 Flag for Jász-Nagykun-Szolnok (HU-JN) 🏴󠁨󠁵󠁰󠁳󠁿 Flag for Pécs (HU-PS) 🏴󠁨󠁵󠁫󠁭󠁿 Flag for Kecskemét (HU-KM) 🏴󠁨󠁵󠁤󠁥󠁿 Flag for Debrecen (HU-DE) 🏴󠁨󠁵󠁢󠁥󠁿 Flag for Békés (HU-BE) 🏴󠁨󠁵󠁮󠁯󠁿 Flag for Nógrád (HU-NO) 🏴󠁨󠁵󠁳󠁨󠁿 Flag for Szombathely (HU-SH) 🏴󠁨󠁵󠁧󠁹󠁿 Flag for Győr (HU-GY) 🏴󠁩󠁤󠁮󠁵󠁿 Flag for Lesser Sunda Islands (ID-NU) 🏴󠁨󠁵󠁴󠁢󠁿 Flag for Tatabánya (HU-TB) 🏴󠁩󠁤󠁪󠁷󠁿 Flag for Java (ID-JW) 🏴󠁩󠁮󠁣󠁨󠁿 Flag for Chandigarh (IN-CH) 🏴󠁩󠁮󠁧󠁪󠁿 Flag for Gujarat (IN-GJ) 🏴󠁩󠁥󠁬󠁿 Flag for Leinster (IE-L) 🏴󠁨󠁵󠁺󠁡󠁿 Flag for Zala (HU-ZA) 🏴󠁩󠁮󠁤󠁤󠁿 Flag for Daman and Diu (IN-DD) 🏴󠁩󠁬󠁴󠁡󠁿 Flag for Tel Aviv District (IL-TA) 🏴󠁩󠁤󠁳󠁬󠁿 Flag for Sulawesi (ID-SL) 🏴󠁩󠁮󠁡󠁲󠁿 Flag for Arunachal Pradesh (IN-AR) 🏴󠁨󠁵󠁶󠁥󠁿 Flag for Veszprém County (HU-VE) 🏴󠁩󠁮󠁡󠁮󠁿 Flag for Andaman and Nicobar Islands (IN-AN) 🏴󠁨󠁵󠁳󠁯󠁿 Flag for Somogy (HU-SO) 🏴󠁨󠁵󠁶󠁡󠁿 Flag for Vas (HU-VA) 🏴󠁩󠁬󠁪󠁭󠁿 Flag for Jerusalem (IL-JM) 🏴󠁩󠁮󠁤󠁮󠁿 Flag for Dadra and Nagar Haveli (IN-DN) 🏴󠁨󠁵󠁶󠁭󠁿 Flag for Veszprém (HU-VM) 🏴󠁨󠁵󠁳󠁴󠁿 Flag for Salgótarján (HU-ST) 🏴󠁩󠁮󠁣󠁴󠁿 Flag for Chhattisgarh (IN-CT) 🏴󠁩󠁥󠁵󠁿 Flag for Ulster (IE-U) 🏴󠁩󠁮󠁤󠁬󠁿 Flag for Delhi (IN-DL) 🏴󠁩󠁥󠁭󠁿 Flag for Munster (IE-M) 🏴󠁩󠁥󠁣󠁿 Flag for Connacht (IE-C) 🏴󠁩󠁬󠁨󠁡󠁿 Flag for Haifa District (IL-HA) 🏴󠁩󠁤󠁫󠁡󠁿 Flag for Kalimantan (ID-KA) 🏴󠁩󠁮󠁧󠁡󠁿 Flag for Goa (IN-GA) 🏴󠁩󠁤󠁳󠁭󠁿 Flag for Sumatra (ID-SM) 🏴󠁩󠁤󠁰󠁰󠁿 Flag for Papua Islands (ID-PP) 🏴󠁨󠁵󠁳󠁳󠁿 Flag for Szekszárd 
(HU-SS) 🏴󠁩󠁬󠁺󠁿 Flag for Northern District (IL-Z) 🏴󠁨󠁵󠁴󠁯󠁿 Flag for Tolna (HU-TO) 🏴󠁩󠁬󠁭󠁿 Flag for Central District (IL-M) 🏴󠁩󠁬󠁤󠁿 Flag for Southern District (IL-D) 🏴󠁩󠁮󠁢󠁲󠁿 Flag for Bihar (IN-BR) 🏴󠁨󠁵󠁺󠁥󠁿 Flag for Zalaegerszeg (HU-ZE) 🏴󠁩󠁮󠁡󠁰󠁿 Flag for Andhra Pradesh (IN-AP) 🏴󠁩󠁱󠁤󠁡󠁿 Flag for Dohuk (IQ-DA) 🏴󠁩󠁮󠁪󠁨󠁿 Flag for Jharkhand (IN-JH) 🏴󠁩󠁮󠁫󠁬󠁿 Flag for Kerala (IN-KL) 🏴󠁩󠁮󠁷󠁢󠁿 Flag for West Bengal (IN-WB) 🏴󠁩󠁮󠁯󠁲󠁿 Flag for Odisha (IN-OR) 🏴󠁩󠁮󠁰󠁹󠁿 Flag for Puducherry (IN-PY) 🏴󠁩󠁱󠁫󠁡󠁿 Flag for Karbala (IQ-KA) 🏴󠁩󠁱󠁳󠁤󠁿 Flag for Saladin (IQ-SD) 🏴󠁩󠁮󠁭󠁺󠁿 Flag for Mizoram (IN-MZ) 🏴󠁩󠁮󠁨󠁰󠁿 Flag for Himachal Pradesh (IN-HP) 🏴󠁩󠁮󠁭󠁰󠁿 Flag for Madhya Pradesh (IN-MP) 🏴󠁩󠁮󠁰󠁢󠁿 Flag for Punjab (IN-PB) 🏴󠁩󠁮󠁮󠁬󠁿 Flag for Nagaland (IN-NL) 🏴󠁩󠁱󠁱󠁡󠁿 Flag for Al-Qādisiyyah (IQ-QA) 🏴󠁩󠁱󠁤󠁩󠁿 Flag for Diyala (IQ-DI) 🏴󠁩󠁱󠁮󠁩󠁿 Flag for Nineveh (IQ-NI) 🏴󠁩󠁱󠁤󠁱󠁿 Flag for Dhi Qar (IQ-DQ) 🏴󠁩󠁮󠁭󠁬󠁿 Flag for Meghalaya (IN-ML) 🏴󠁩󠁮󠁴󠁮󠁿 Flag for Tamil Nadu (IN-TN) 🏴󠁩󠁱󠁮󠁡󠁿 Flag for Najaf (IQ-NA) 🏴󠁩󠁱󠁭󠁵󠁿 Flag for Al Muthanna (IQ-MU) 🏴󠁩󠁮󠁴󠁧󠁿 Flag for Telangana (IN-TG) 🏴󠁩󠁮󠁨󠁲󠁿 Flag for Haryana (IN-HR) 🏴󠁩󠁮󠁵󠁴󠁿 Flag for Uttarakhand (IN-UT) 🏴󠁩󠁮󠁴󠁲󠁿 Flag for Tripura (IN-TR) 🏴󠁩󠁱󠁢󠁧󠁿 Flag for Baghdad (IQ-BG) 🏴󠁩󠁮󠁬󠁤󠁿 Flag for Lakshadweep (IN-LD) 🏴󠁩󠁱󠁭󠁡󠁿 Flag for Maysan (IQ-MA) 🏴󠁩󠁱󠁢󠁡󠁿 Flag for Basra (IQ-BA) 🏴󠁩󠁱󠁡󠁲󠁿 Flag for Erbil (IQ-AR) 🏴󠁩󠁮󠁭󠁨󠁿 Flag for Maharashtra (IN-MH) 🏴󠁩󠁱󠁡󠁮󠁿 Flag for Al Anbar (IQ-AN) 🏴󠁩󠁮󠁳󠁫󠁿 Flag for Sikkim (IN-SK) 🏴󠁩󠁱󠁢󠁢󠁿 Flag for Babylon (IQ-BB) 🏴󠁩󠁮󠁵󠁰󠁿 Flag for Uttar Pradesh (IN-UP) 🏴󠁩󠁱󠁳󠁵󠁿 Flag for Sulaymaniyah (IQ-SU) 🏴󠁩󠁮󠁲󠁪󠁿 Flag for Rajasthan (IN-RJ) 🏴󠁩󠁮󠁪󠁫󠁿 Flag for Jammu and Kashmir (IN-JK) 🏴󠁩󠁲󠀰󠀸󠁿 Flag for Chaharmahal and Bakhtiari (IR-08) 🏴󠁩󠁲󠀲󠀶󠁿 Flag for Qom (IR-26) 🏴󠁩󠁳󠀱󠁿 Flag for Capital (IS-1) 👨🏾‍👨🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁩󠁲󠀰󠀳󠁿 Flag for Ardabil (IR-03) 🏴󠁩󠁲󠀲󠀵󠁿 Flag for Yazd (IR-25) 🏴󠁩󠁲󠀲󠀹󠁿 Flag for South Khorasan (IR-29) 👨🏿‍👨🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁩󠁲󠀲󠀴󠁿 Flag for Hamadan (IR-24) 🏴󠁧󠁹󠁭󠁡󠁿 Flag for Mahaica-Berbice 
(GY-MA) 🏴󠁩󠁳󠀳󠁿 Flag for Western (IS-3) 🏴󠁩󠁲󠀲󠀷󠁿 Flag for Golestan (IR-27) 🏴󠁩󠁲󠀱󠀱󠁿 Flag for Zanjan (IR-11) 🏴󠁩󠁲󠀲󠀰󠁿 Flag for Lorestan (IR-20) 🏴󠁩󠁲󠀱󠀷󠁿 Flag for Kermanshah (IR-17) 🏴󠁩󠁲󠀱󠀸󠁿 Flag for Kohgiluyeh and Boyer-Ahmad (IR-18) 🏴󠁥󠁧󠁣󠁿 Flag for Cairo (EG-C) 🏴󠁩󠁲󠀳󠀱󠁿 Flag for North Khorasan (IR-31) 🏴󠁩󠁲󠀰󠀶󠁿 Flag for Bushehr (IR-06) 🏴󠁥󠁳󠁥󠁸󠁿 Flag for Extremadura (ES-EX) 🏴󠁥󠁳󠁣󠁮󠁿 Flag for Canary Islands (ES-CN) 🏴󠁩󠁳󠀷󠁿 Flag for Eastern (IS-7) 🏴󠁩󠁲󠀰󠀵󠁿 Flag for Ilam (IR-05) 🏴󠁩󠁲󠀲󠀸󠁿 Flag for Qazvin (IR-28) 🏴󠁩󠁲󠀰󠀴󠁿 Flag for Isfahan (IR-04) 🏴󠁩󠁲󠀱󠀵󠁿 Flag for Kerman (IR-15) 🏴󠁩󠁲󠀲󠀳󠁿 Flag for Hormozgan (IR-23) 🏴󠁩󠁱󠁷󠁡󠁿 Flag for Wasit (IQ-WA) 🏴󠁩󠁴󠀲󠀱󠁿 Flag for Piedmont (IT-21) 🏴󠁩󠁳󠀶󠁿 Flag for Northeastern (IS-6) 🏴󠁩󠁳󠀵󠁿 Flag for Northwestern (IS-5) 🏴󠁩󠁲󠀲󠀲󠁿 Flag for Markazi (IR-22) 🏴󠁩󠁲󠀱󠀹󠁿 Flag for Gilan (IR-19) 🏴󠁩󠁲󠀱󠀰󠁿 Flag for Khuzestan (IR-10) 🏴󠁩󠁲󠀱󠀲󠁿 Flag for Semnan (IR-12) 🏴󠁩󠁳󠀲󠁿 Flag for Southern Peninsula (IS-2) 🏴󠁪󠁭󠀱󠀲󠁿 Flag for Manchester (JM-12) 🏴󠁪󠁯󠁩󠁲󠁿 Flag for Irbid (JO-IR) 🏴󠁪󠁭󠀰󠀵󠁿 Flag for Saint Mary (JM-05) 🏴󠁩󠁴󠀷󠀷󠁿 Flag for Basilicata (IT-77) 🏴󠁩󠁴󠀳󠀶󠁿 Flag for Friuli–Venezia Giulia (IT-36) 🏴󠁪󠁭󠀱󠀳󠁿 Flag for Clarendon (JM-13) 🏴󠁩󠁴󠀵󠀷󠁿 Flag for Marche (IT-57) 🏴󠁪󠁭󠀰󠀴󠁿 Flag for Portland (JM-04) 🏴󠁩󠁴󠀸󠀲󠁿 Flag for Sicily (IT-82) 🏴󠁩󠁴󠀳󠀴󠁿 Flag for Veneto (IT-34) 🏴󠁩󠁴󠀶󠀵󠁿 Flag for Abruzzo (IT-65) 🏴󠁩󠁴󠀶󠀷󠁿 Flag for Molise (IT-67) 🏴󠁪󠁯󠁢󠁡󠁿 Flag for Balqa (JO-BA) 🏴󠁩󠁴󠀷󠀵󠁿 Flag for Apulia (IT-75) 🏴󠁩󠁴󠀷󠀸󠁿 Flag for Calabria (IT-78) 🏴󠁩󠁴󠀵󠀲󠁿 Flag for Tuscany (IT-52) 🏴󠁪󠁭󠀰󠀹󠁿 Flag for Hanover (JM-09) 🏴󠁪󠁭󠀰󠀲󠁿 Flag for Saint Andrew (JM-02) 🏴󠁪󠁯󠁡󠁴󠁿 Flag for Tafilah (JO-AT) 🏴󠁩󠁴󠀵󠀵󠁿 Flag for Umbria (IT-55) 🏴󠁪󠁭󠀰󠀸󠁿 Flag for Saint James (JM-08) 🏴󠁪󠁭󠀰󠀶󠁿 Flag for Saint Ann (JM-06) 🏴󠁪󠁭󠀱󠀱󠁿 Flag for Saint Elizabeth (JM-11) 🏴󠁪󠁯󠁡󠁺󠁿 Flag for Zarqa (JO-AZ) 🏴󠁦󠁩󠀱󠀲󠁿 Flag for Ostrobothnia (FI-12) 🏴󠁩󠁴󠀶󠀲󠁿 Flag for Lazio (IT-62) 🏴󠁪󠁯󠁡󠁪󠁿 Flag for Ajloun (JO-AJ) 🏴󠁩󠁴󠀴󠀲󠁿 Flag for Liguria (IT-42) 🏴󠁪󠁭󠀰󠀷󠁿 Flag for Trelawny (JM-07) 🏴󠁪󠁯󠁡󠁱󠁿 Flag for Aqaba (JO-AQ) 🏴󠁪󠁯󠁪󠁡󠁿 Flag for Jerash (JO-JA) 🏴󠁪󠁯󠁡󠁭󠁿 Flag for Amman (JO-AM) 🏴󠁩󠁴󠀲󠀳󠁿 Flag for Aosta Valley (IT-23) 🏴󠁪󠁭󠀱󠀰󠁿 Flag for 
Westmoreland (JM-10) 🏴󠁪󠁰󠀰󠀸󠁿 Flag for Ibaraki (JP-08) 🏴󠁪󠁯󠁭󠁤󠁿 Flag for Madaba (JO-MD) 🏴󠁪󠁰󠀳󠀲󠁿 Flag for Shimane (JP-32) 🏴󠁪󠁰󠀲󠀶󠁿 Flag for Kyōto (JP-26) 🏴󠁣󠁬󠁡󠁲󠁿 Flag for Araucanía (CL-AR) 🏴󠁪󠁰󠀰󠀹󠁿 Flag for Tochigi (JP-09) 🏴󠁪󠁰󠀰󠀵󠁿 Flag for Akita (JP-05) 🏴󠁪󠁰󠀱󠀲󠁿 Flag for Chiba (JP-12) 🏴󠁪󠁰󠀰󠀴󠁿 Flag for Miyagi (JP-04) 🏴󠁪󠁰󠀱󠀵󠁿 Flag for Niigata (JP-15) 🏴󠁪󠁰󠀱󠀶󠁿 Flag for Toyama (JP-16) 🏴󠁪󠁰󠀲󠀳󠁿 Flag for Aichi (JP-23) 🏴󠁪󠁰󠀳󠀶󠁿 Flag for Tokushima (JP-36) 🏴󠁪󠁰󠀲󠀰󠁿 Flag for Nagano (JP-20) 🏴󠁪󠁰󠀳󠀱󠁿 Flag for Tottori (JP-31) 🏴󠁪󠁰󠀰󠀳󠁿 Flag for Iwate (JP-03) 🏴󠁪󠁰󠀳󠀳󠁿 Flag for Okayama (JP-33) 🏴󠁪󠁰󠀱󠀷󠁿 Flag for Ishikawa (JP-17) 🏴󠁪󠁰󠀳󠀰󠁿 Flag for Wakayama (JP-30) 🏴󠁪󠁰󠀱󠀰󠁿 Flag for Gunma (JP-10) 🏴󠁪󠁯󠁭󠁡󠁿 Flag for Mafraq (JO-MA) 🏴󠁪󠁰󠀳󠀵󠁿 Flag for Yamaguchi (JP-35) 🏴󠁣󠁵󠀱󠀲󠁿 Flag for Granma (CU-12) 🏴󠁪󠁰󠀲󠀵󠁿 Flag for Shiga (JP-25) 🏴󠁪󠁰󠀰󠀲󠁿 Flag for Aomori (JP-02) 🏴󠁪󠁰󠀱󠀱󠁿 Flag for Saitama (JP-11) 🏴󠁪󠁰󠀲󠀹󠁿 Flag for Nara (JP-29) 🏴󠁪󠁰󠀱󠀹󠁿 Flag for Yamanashi (JP-19) 🏴󠁪󠁰󠀳󠀴󠁿 Flag for Hiroshima (JP-34) 🏴󠁪󠁯󠁭󠁮󠁿 Flag for Ma’an (JO-MN) 🏴󠁪󠁰󠀲󠀲󠁿 Flag for Shizuoka (JP-22) 🏴󠁪󠁰󠀲󠀷󠁿 Flag for Ōsaka (JP-27) 🏴󠁪󠁰󠀲󠀴󠁿 Flag for Mie (JP-24) 🏴󠁪󠁰󠀰󠀶󠁿 Flag for Yamagata (JP-06) 🏴󠁪󠁰󠀲󠀸󠁿 Flag for Hyōgo (JP-28) 🏴󠁪󠁯󠁫󠁡󠁿 Flag for Karak (JO-KA) 🏴󠁪󠁰󠀳󠀸󠁿 Flag for Ehime (JP-38) 🏴󠁪󠁰󠀱󠀴󠁿 Flag for Kanagawa (JP-14) 🏴󠁪󠁰󠀳󠀷󠁿 Flag for Kagawa (JP-37) 🏴󠁫󠁥󠀰󠀷󠁿 Flag for Garissa (KE-07) 🏴󠁫󠁥󠀲󠀴󠁿 Flag for Mandera (KE-24) 🏴󠁪󠁰󠀴󠀶󠁿 Flag for Kagoshima (JP-46) 🏴󠁫󠁥󠀱󠀷󠁿 Flag for Kisumu (KE-17) 🏴󠁫󠁥󠀱󠀴󠁿 Flag for Kilifi (KE-14) 🏴󠁫󠁥󠀱󠀵󠁿 Flag for Kirinyaga (KE-15) 🏴󠁫󠁥󠀱󠀰󠁿 Flag for Kajiado (KE-10) 🏴󠁫󠁥󠀰󠀳󠁿 Flag for Bungoma (KE-03) 🏴󠁫󠁥󠀳󠀲󠁿 Flag for Nandi (KE-32) 🏴󠁫󠁥󠀱󠀳󠁿 Flag for Kiambu (KE-13) 🏴󠁫󠁥󠀲󠀰󠁿 Flag for Laikipia (KE-20) 🏴󠁫󠁥󠀲󠀱󠁿 Flag for Lamu (KE-21) 🏴󠁪󠁰󠀴󠀰󠁿 Flag for Fukuoka (JP-40) 🏴󠁫󠁥󠀰󠀴󠁿 Flag for Busia (KE-04) 🏴󠁪󠁰󠀴󠀱󠁿 Flag for Saga (JP-41) 🏴󠁫󠁥󠀲󠀷󠁿 Flag for Migori (KE-27) 🏴󠁫󠁥󠀰󠀶󠁿 Flag for Embu (KE-06) 👩🏾‍👦🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁫󠁥󠀱󠀲󠁿 Flag for Kericho (KE-12) 🏴󠁫󠁥󠀰󠀹󠁿 Flag for Isiolo (KE-09) 🏴󠁫󠁥󠀱󠀹󠁿 Flag for Kwale (KE-19) 🏴󠁪󠁰󠀴󠀲󠁿 Flag for Nagasaki 
(JP-42) 🏴󠁫󠁥󠀳󠀰󠁿 Flag for Nairobi County (KE-30) 🏴󠁫󠁥󠀲󠀳󠁿 Flag for Makueni (KE-23) 🏴󠁫󠁥󠀲󠀹󠁿 Flag for Murang’a (KE-29) 🏴󠁪󠁰󠀳󠀹󠁿 Flag for Kōchi (JP-39) 🏴󠁫󠁥󠀰󠀲󠁿 Flag for Bomet (KE-02) 🏴󠁫󠁥󠀲󠀸󠁿 Flag for Mombasa (KE-28) 🏴󠁫󠁥󠀰󠀸󠁿 Flag for Homa Bay (KE-08) 🏴󠁫󠁥󠀱󠀱󠁿 Flag for Kakamega (KE-11) 🏴󠁫󠁥󠀲󠀲󠁿 Flag for Machakos (KE-22) 🏴󠁫󠁥󠀱󠀶󠁿 Flag for Kisii (KE-16) 🏴󠁫󠁥󠀰󠀵󠁿 Flag for Elgeyo-Marakwet (KE-05) 🏴󠁪󠁰󠀴󠀴󠁿 Flag for Ōita (JP-44) 🏴󠁫󠁥󠀳󠀳󠁿 Flag for Narok (KE-33) 🏴󠁫󠁥󠀲󠀶󠁿 Flag for Meru (KE-26) 🏴󠁪󠁰󠀴󠀳󠁿 Flag for Kumamoto (JP-43) 🏴󠁪󠁰󠀴󠀵󠁿 Flag for Miyazaki (JP-45) 🏴󠁫󠁨󠀱󠀹󠁿 Flag for Stung Treng (KH-19) 🏴󠁫󠁥󠀳󠀷󠁿 Flag for Samburu (KE-37) 🏴󠁫󠁥󠀴󠀷󠁿 Flag for West Pokot (KE-47) 🏴󠁫󠁥󠀳󠀹󠁿 Flag for Taita-Taveta (KE-39) 🏴󠁫󠁨󠀱󠀴󠁿 Flag for Prey Veng (KH-14) 🏴󠁫󠁥󠀴󠀱󠁿 Flag for Tharaka-Nithi (KE-41) 🏴󠁫󠁧󠁯󠁿 Flag for Osh Region (KG-O) 🏴󠁫󠁨󠀲󠀵󠁿 Flag for Tbong Khmum (KH-25) 🏴󠁫󠁧󠁴󠁿 Flag for Talas (KG-T) 🏴󠁫󠁨󠀱󠀲󠁿 Flag for Phnom Penh (KH-12) 🏴󠁫󠁧󠁧󠁢󠁿 Flag for Bishkek (KG-GB) 🏴󠁫󠁥󠀴󠀴󠁿 Flag for Uasin Gishu (KE-44) 🏴󠁫󠁨󠀲󠀳󠁿 Flag for Kep (KH-23) 🏴󠁫󠁨󠀱󠀰󠁿 Flag for Kratié (KH-10) 🏴󠁫󠁨󠀲󠀱󠁿 Flag for Takéo (KH-21) 🏴󠁫󠁨󠀲󠁿 Flag for Battambang (KH-2) 🏴󠁫󠁥󠀳󠀶󠁿 Flag for Nyeri (KE-36) 🏴󠁫󠁨󠀱󠀳󠁿 Flag for Preah Vihear (KH-13) 🏴󠁫󠁥󠀴󠀰󠁿 Flag for Tana River (KE-40) 🏴󠁫󠁨󠀲󠀴󠁿 Flag for Pailin (KH-24) 🏴󠁫󠁨󠀱󠀶󠁿 Flag for Ratanakiri (KH-16) 🏴󠁫󠁨󠀲󠀲󠁿 Flag for Oddar Meanchey (KH-22) 🏴󠁫󠁥󠀴󠀲󠁿 Flag for Trans Nzoia (KE-42) 🏴󠁫󠁨󠀱󠀸󠁿 Flag for Sihanoukville (KH-18) 🏴󠁫󠁥󠀴󠀵󠁿 Flag for Vihiga (KE-45) 🏴󠁫󠁧󠁧󠁯󠁿 Flag for Osh (KG-GO) 🏴󠁫󠁧󠁢󠁿 Flag for Batken (KG-B) 🏴󠁫󠁧󠁪󠁿 Flag for Jalal-Abad (KG-J) 🏴󠁫󠁨󠀱󠀱󠁿 Flag for Mondulkiri (KH-11) 🏴󠁫󠁨󠀱󠀷󠁿 Flag for Siem Reap (KH-17) 🏴󠁫󠁥󠀴󠀳󠁿 Flag for Turkana (KE-43) 🏴󠁫󠁨󠀱󠁿 Flag for Banteay Meanchey (KH-1) 🏴󠁫󠁧󠁮󠁿 Flag for Naryn (KG-N) 🏴󠁫󠁥󠀳󠀵󠁿 Flag for Nyandarua (KE-35) 🏴󠁫󠁥󠀳󠀸󠁿 Flag for Siaya (KE-38) 🏴󠁫󠁥󠀳󠀴󠁿 Flag for Nyamira (KE-34) 🏴󠁫󠁨󠀱󠀵󠁿 Flag for Pursat (KH-15) 🏴󠁫󠁥󠀴󠀶󠁿 Flag for Wajir (KE-46) 🏴󠁫󠁧󠁹󠁿 Flag for Issyk-Kul (KG-Y) 🏴󠁫󠁧󠁣󠁿 Flag for Chuy (KG-C) 🏴󠁫󠁭󠁭󠁿 Flag for Mohéli (KM-M) 🏴󠁫󠁲󠀱󠀱󠁿 Flag for Seoul (KR-11) 🏴󠁫󠁨󠀴󠁿 Flag for Kampong Chhnang (KH-4) 🏴󠁫󠁲󠀳󠀰󠁿 Flag for Daejeon (KR-30) 🏴󠁫󠁰󠀰󠀵󠁿 Flag for South 
Hwanghae (KP-05) 🏴󠁫󠁨󠀷󠁿 Flag for Kampot (KH-7) 🏴󠁫󠁮󠁮󠁿 Flag for Nevis (KN-N) 🏴󠁫󠁰󠀰󠀴󠁿 Flag for Chagang (KP-04) 🏴󠁫󠁲󠀴󠀶󠁿 Flag for South Jeolla (KR-46) 🏴󠁫󠁰󠀰󠀶󠁿 Flag for North Hwanghae (KP-06) 🏴󠁫󠁮󠁫󠁿 Flag for Saint Kitts (KN-K) 🏴󠁫󠁨󠀵󠁿 Flag for Kampong Speu (KH-5) 🏴󠁫󠁲󠀴󠀵󠁿 Flag for North Jeolla (KR-45) 🏴󠁫󠁰󠀰󠀳󠁿 Flag for North Pyongan (KP-03) 🏴󠁫󠁨󠀹󠁿 Flag for Koh Kong (KH-9) 🏴󠁫󠁰󠀰󠀷󠁿 Flag for Kangwon (KP-07) 🏴󠁫󠁲󠀲󠀶󠁿 Flag for Busan (KR-26) 🏴󠁫󠁲󠀲󠀹󠁿 Flag for Gwangju City (KR-29) 🏴󠁫󠁨󠀳󠁿 Flag for Kampong Cham (KH-3) 🏴󠁫󠁲󠀴󠀳󠁿 Flag for North Chungcheong (KR-43) 🏴󠁫󠁨󠀸󠁿 Flag for Kandal (KH-8) 🏴󠁫󠁨󠀶󠁿 Flag for Kampong Thom (KH-6) 🏴󠁫󠁰󠀱󠀰󠁿 Flag for Ryanggang (KP-10) 🏴󠁫󠁰󠀰󠀲󠁿 Flag for South Pyongan (KP-02) 🏴󠁫󠁭󠁧󠁿 Flag for Grande Comore (KM-G) 🏴󠁫󠁰󠀰󠀸󠁿 Flag for South Hamgyong (KP-08) 🏴󠁫󠁰󠀱󠀳󠁿 Flag for Rason (KP-13) 🏴󠁫󠁲󠀲󠀷󠁿 Flag for Daegu (KR-27) 🏴󠁫󠁲󠀲󠀸󠁿 Flag for Incheon (KR-28) 🏴󠁫󠁲󠀴󠀲󠁿 Flag for Gangwon (KR-42) 🏴󠁫󠁰󠀰󠀱󠁿 Flag for Pyongyang (KP-01) 🏴󠁫󠁲󠀳󠀱󠁿 Flag for Ulsan (KR-31) 🏴󠁫󠁲󠀴󠀴󠁿 Flag for South Chungcheong (KR-44) 🏴󠁫󠁭󠁡󠁿 Flag for Anjouan (KM-A) 🏴󠁫󠁲󠀴󠀱󠁿 Flag for Gyeonggi (KR-41) 🏴󠁫󠁲󠀴󠀷󠁿 Flag for North Gyeongsang (KR-47) 🏴󠁫󠁰󠀰󠀹󠁿 Flag for North Hamgyong (KP-09) 🏴󠁬󠁡󠁨󠁯󠁿 Flag for Houaphanh (LA-HO) 🏴󠁫󠁺󠁢󠁡󠁹󠁿 Flag for Bayqongyr (KZ-BAY) 🏴󠁬󠁡󠁣󠁨󠁿 Flag for Champasak (LA-CH) 🏴󠁬󠁡󠁶󠁴󠁿 Flag for Vientiane (LA-VT) 🏴󠁫󠁷󠁨󠁡󠁿 Flag for Hawalli (KW-HA) 🏴󠁬󠁡󠁰󠁨󠁿 Flag for Phongsaly (LA-PH) 🏴󠁫󠁺󠁰󠁡󠁶󠁿 Flag for Pavlodar (KZ-PAV) 🏴󠁫󠁺󠁡󠁬󠁭󠁿 Flag for Almaty Region (KZ-ALM) 🏴󠁫󠁷󠁫󠁵󠁿 Flag for Al Asimah (KW-KU) 🏴󠁬󠁡󠁢󠁫󠁿 Flag for Bokeo (LA-BK) 🏴󠁬󠁡󠁡󠁴󠁿 Flag for Attapeu (LA-AT) 🏴󠁫󠁺󠁡󠁫󠁴󠁿 Flag for Aktobe (KZ-AKT) 🏴󠁫󠁺󠁡󠁴󠁹󠁿 Flag for Atyrau (KZ-ATY) 🏴󠁫󠁷󠁪󠁡󠁿 Flag for Al Jahra (KW-JA) 🏴󠁬󠁡󠁢󠁬󠁿 Flag for Bolikhamsai (LA-BL) 🏴󠁬󠁡󠁯󠁵󠁿 Flag for Oudomxay (LA-OU) 🏴󠁫󠁺󠁭󠁡󠁮󠁿 Flag for Mangystau (KZ-MAN) 🏴󠁫󠁺󠁺󠁡󠁰󠁿 Flag for West Kazakhstan (KZ-ZAP) 🏴󠁫󠁺󠁺󠁨󠁡󠁿 Flag for Jambyl (KZ-ZHA) 🏴󠁫󠁺󠁡󠁳󠁴󠁿 Flag for Astana (KZ-AST) 🏴󠁬󠁡󠁬󠁰󠁿 Flag for Luang Prabang (LA-LP) 🏴󠁫󠁷󠁦󠁡󠁿 Flag for Al Farwaniyah (KW-FA) 🏴󠁫󠁺󠁫󠁵󠁳󠁿 Flag for Kostanay (KZ-KUS) 🏴󠁫󠁺󠁡󠁬󠁡󠁿 Flag for Almaty (KZ-ALA) 🏴󠁫󠁺󠁫󠁡󠁲󠁿 Flag for Karagandy (KZ-KAR) 🏴󠁫󠁺󠁫󠁺󠁹󠁿 Flag 
for Kyzylorda (KZ-KZY) 🏴󠁬󠁡󠁳󠁬󠁿 Flag for Salavan (LA-SL) 🏴󠁬󠁡󠁬󠁭󠁿 Flag for Luang Namtha (LA-LM) 🏴󠁫󠁲󠀵󠀰󠁿 Flag for Sejong (KR-50) 🏴󠁫󠁷󠁭󠁵󠁿 Flag for Mubarak Al-Kabeer (KW-MU) 🏴󠁫󠁺󠁳󠁥󠁶󠁿 Flag for North Kazakhstan (KZ-SEV) 👩🏿‍👦🏿‍👧🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁫󠁷󠁡󠁨󠁿 Flag for Al Ahmadi (KW-AH) 🏴󠁬󠁡󠁫󠁨󠁿 Flag for Khammouane (LA-KH) 🏴󠁫󠁺󠁡󠁫󠁭󠁿 Flag for Akmola (KZ-AKM) 🏴󠁫󠁺󠁹󠁵󠁺󠁿 Flag for South Kazakhstan (KZ-YUZ) 🏴󠁬󠁩󠀰󠀹󠁿 Flag for Triesen (LI-09) 👨🏽‍👨🏽‍👦🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏻‍👦🏻‍👶🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 🏴󠁬󠁫󠀷󠁿 Flag for North Central (LK-7) 🏴󠁬󠁡󠁸󠁡󠁿 Flag for Sainyabuli (LA-XA) 🏴󠁬󠁢󠁡󠁫󠁿 Flag for Akkar (LB-AK) 🏴󠁬󠁣󠀰󠀷󠁿 Flag for Laborie (LC-07) 🏴󠁬󠁣󠀰󠀶󠁿 Flag for Gros Islet (LC-06) 🏴󠁬󠁢󠁡󠁳󠁿 Flag for North (LB-AS) 🏴󠁬󠁩󠀰󠀱󠁿 Flag for Balzers (LI-01) 🏴󠁬󠁫󠀲󠁿 Flag for Central (LK-2) 🏴󠁬󠁩󠀰󠀴󠁿 Flag for Mauren (LI-04) 🏴󠁬󠁢󠁮󠁡󠁿 Flag for Nabatieh (LB-NA) 🏴󠁬󠁣󠀰󠀵󠁿 Flag for Dennery (LC-05) 🏴󠁬󠁢󠁪󠁡󠁿 Flag for South (LB-JA) 🏴󠁬󠁩󠀱󠀱󠁿 Flag for Vaduz (LI-11) 🏴󠁬󠁣󠀰󠀲󠁿 Flag for Castries (LC-02) 🏴󠁬󠁫󠀸󠁿 Flag for Uva (LK-8) 🏴󠁬󠁩󠀱󠀰󠁿 Flag for Triesenberg (LI-10) 🏴󠁬󠁩󠀰󠀵󠁿 Flag for Planken (LI-05) 🏴󠁬󠁣󠀱󠀱󠁿 Flag for Vieux Fort (LC-11) 🏴󠁬󠁢󠁢󠁨󠁿 Flag for Baalbek-Hermel (LB-BH) 🏴󠁬󠁫󠀶󠁿 Flag for North Western (LK-6) 🏴󠁬󠁩󠀰󠀶󠁿 Flag for Ruggell (LI-06) 🏴󠁬󠁣󠀰󠀸󠁿 Flag for Micoud (LC-08) 🏴󠁬󠁩󠀰󠀲󠁿 Flag for Eschen (LI-02) 🏴󠁬󠁣󠀱󠀲󠁿 Flag for Canaries (LC-12) 🏴󠁬󠁢󠁢󠁡󠁿 Flag for Beirut (LB-BA) 🏴󠁬󠁡󠁸󠁩󠁿 Flag for Xiangkhouang (LA-XI) 🏴󠁬󠁣󠀱󠀰󠁿 Flag for Soufrière (LC-10) 🏴󠁬󠁣󠀰󠀱󠁿 Flag for Anse la Raye (LC-01) 🏴󠁬󠁣󠀰󠀳󠁿 Flag for Choiseul (LC-03) 🏴󠁬󠁩󠀰󠀳󠁿 Flag for Gamprin (LI-03) 🏴󠁬󠁫󠀴󠁿 Flag for Northern (LK-4) 🏴󠁬󠁲󠁧󠁢󠁿 Flag for Grand Bassa (LR-GB) 🏴󠁬󠁲󠁧󠁰󠁿 Flag for Gbarpolu (LR-GP) 🏴󠁬󠁲󠁧󠁧󠁿 Flag for Grand Gedeh (LR-GG) 🏴󠁬󠁴󠀱󠀲󠁿 Flag for Jurbarkas (LT-12) 🏴󠁬󠁲󠁮󠁩󠁿 Flag for Nimba (LR-NI) 🏴󠁦󠁩󠀰󠀸󠁿 Flag for Central Finland (FI-08) 🏴󠁬󠁴󠀱󠀰󠁿 Flag for Jonava (LT-10) 🏴󠁬󠁲󠁭󠁧󠁿 Flag for Margibi (LR-MG) 🏴󠁬󠁲󠁳󠁩󠁿 Flag for Sinoe (LR-SI) 🏴󠁬󠁲󠁭󠁯󠁿 Flag for 
Montserrado (LR-MO) 🏴󠁬󠁴󠀱󠀶󠁿 Flag for Kaunas (LT-16) 🏴󠁬󠁳󠁫󠁿 Flag for Thaba-Tseka (LS-K) 🏴󠁬󠁴󠀰󠀵󠁿 Flag for Birštonas (LT-05) 🏴󠁬󠁳󠁦󠁿 Flag for Mohale’s Hoek (LS-F) 🏴󠁬󠁲󠁢󠁭󠁿 Flag for Bomi (LR-BM) 🏴󠁬󠁴󠀰󠀷󠁿 Flag for Druskininkai (LT-07) 🏴󠁬󠁴󠀱󠀴󠁿 Flag for Kalvarija (LT-14) 🏴󠁬󠁴󠀱󠀵󠁿 Flag for Kauno Municipality (LT-15) 🏴󠁬󠁳󠁨󠁿 Flag for Qacha’s Nek (LS-H) 🏴󠁬󠁴󠀰󠀴󠁿 Flag for Anykščiai (LT-04) 🏴󠁬󠁳󠁣󠁿 Flag for Leribe (LS-C) 🏴󠁬󠁴󠀱󠀱󠁿 Flag for Joniškis (LT-11) 🏴󠁬󠁲󠁬󠁯󠁿 Flag for Lofa (LR-LO) 🏴󠁬󠁲󠁲󠁩󠁿 Flag for Rivercess (LR-RI) 🏴󠁬󠁴󠀱󠀳󠁿 Flag for Kaišiadorys (LT-13) 🏴󠁬󠁴󠀰󠀸󠁿 Flag for Elektrėnai (LT-08) 🏴󠁬󠁲󠁧󠁫󠁿 Flag for Grand Kru (LR-GK) 🏴󠁬󠁳󠁤󠁿 Flag for Berea (LS-D) 🏴󠁬󠁳󠁧󠁿 Flag for Quthing (LS-G) 🏴󠁬󠁳󠁢󠁿 Flag for Butha-Buthe (LS-B) 🏴󠁬󠁴󠀰󠀱󠁿 Flag for Akmenė (LT-01) 🏴󠁬󠁴󠀰󠀹󠁿 Flag for Ignalina (LT-09) 🏴󠁬󠁳󠁥󠁿 Flag for Mafeteng (LS-E) 🏴󠁬󠁳󠁪󠁿 Flag for Mokhotlong (LS-J) 🏴󠁬󠁴󠀰󠀳󠁿 Flag for Alytus (LT-03) 🏴󠁬󠁴󠀰󠀶󠁿 Flag for Biržai (LT-06) 🏴󠁣󠁦󠁫󠁢󠁿 Flag for Nana-Grébizi (CF-KB) 🏴󠁬󠁲󠁲󠁧󠁿 Flag for River Gee (LR-RG) 🏴󠁬󠁴󠀵󠀴󠁿 Flag for Utena (LT-54) 🏴󠁬󠁴󠀲󠀷󠁿 Flag for Molėtai (LT-27) 🏴󠁬󠁴󠀴󠀱󠁿 Flag for Šakiai (LT-41) 🏴󠁬󠁴󠀱󠀹󠁿 Flag for Kelmė (LT-19) 🏴󠁬󠁴󠀲󠀳󠁿 Flag for Kupiškis (LT-23) 🏴󠁬󠁴󠀵󠀶󠁿 Flag for Vilkaviškis (LT-56) 🏴󠁬󠁴󠀲󠀸󠁿 Flag for Neringa (LT-28) 🏴󠁬󠁴󠀳󠀳󠁿 Flag for Panevėžys (LT-33) 🏴󠁬󠁴󠀲󠀹󠁿 Flag for Pagėgiai (LT-29) 🏴󠁬󠁴󠀴󠀳󠁿 Flag for Šiaulių Municipality (LT-43) 🏴󠁬󠁴󠀳󠀱󠁿 Flag for Palanga (LT-31) 🏴󠁬󠁴󠀱󠀸󠁿 Flag for Kėdainiai (LT-18) 🏴󠁬󠁴󠀴󠀰󠁿 Flag for Rokiškis (LT-40) 🏴󠁬󠁴󠀴󠀵󠁿 Flag for Šilalė (LT-45) 🏴󠁬󠁴󠀵󠀲󠁿 Flag for Trakai (LT-52) 🏴󠁦󠁭󠁰󠁮󠁩󠁿 Flag for Pohnpei (FM-PNI) 🏴󠁬󠁴󠀳󠀶󠁿 Flag for Prienai (LT-36) 🏴󠁬󠁴󠀵󠀱󠁿 Flag for Telšiai (LT-51) 🏴󠁬󠁴󠀲󠀱󠁿 Flag for Klaipėda (LT-21) 🏴󠁬󠁴󠀱󠀷󠁿 Flag for Kazlų Rūda (LT-17) 🏴󠁬󠁴󠀴󠀷󠁿 Flag for Širvintos (LT-47) 🏴󠁬󠁴󠀳󠀰󠁿 Flag for Pakruojis (LT-30) 🏴󠁬󠁴󠀴󠀴󠁿 Flag for Šiauliai (LT-44) 🏴󠁬󠁴󠀲󠀲󠁿 Flag for Kretinga (LT-22) 🏴󠁬󠁴󠀴󠀶󠁿 Flag for Šilutė (LT-46) 🏴󠁬󠁴󠀴󠀲󠁿 Flag for Šalčininkai (LT-42) 🏴󠁬󠁴󠀳󠀸󠁿 Flag for Raseiniai (LT-38) 🏴󠁬󠁴󠀵󠀵󠁿 Flag for Varėna (LT-55) 🏴󠁬󠁴󠀳󠀴󠁿 Flag for Pasvalys (LT-34) 🏴󠁬󠁴󠀳󠀵󠁿 Flag for Plungė (LT-35) 🏴󠁬󠁴󠀴󠀹󠁿 Flag for Švenčionys (LT-49) 🏴󠁬󠁴󠀳󠀷󠁿 Flag for 
Radviliškis (LT-37) 🏴󠁬󠁴󠀲󠀴󠁿 Flag for Lazdijai (LT-24) 🏴󠁬󠁴󠀵󠀰󠁿 Flag for Tauragė (LT-50) 🏴󠁬󠁴󠀴󠀸󠁿 Flag for Skuodas (LT-48) 🏴󠁬󠁴󠀵󠀳󠁿 Flag for Ukmergė (LT-53) 🏴󠁬󠁴󠀳󠀹󠁿 Flag for Rietavas (LT-39) 🏴󠁬󠁴󠀲󠀵󠁿 Flag for Marijampolė (LT-25) 🏴󠁬󠁴󠀲󠀶󠁿 Flag for Mažeikiai (LT-26) 🏴󠁬󠁶󠀰󠀱󠀳󠁿 Flag for Baldone (LV-013) 🏴󠁬󠁴󠁶󠁬󠁿 Flag for Vilnius County (LT-VL) 🏴󠁬󠁶󠀰󠀰󠀶󠁿 Flag for Alsunga (LV-006) 🏴󠁬󠁴󠀵󠀸󠁿 Flag for Vilnius (LT-58) 🏴󠁬󠁴󠁴󠁡󠁿 Flag for Tauragė County (LT-TA) 🏴󠁬󠁴󠁵󠁴󠁿 Flag for Utena County (LT-UT) 🏴󠁬󠁶󠀰󠀰󠀲󠁿 Flag for Aizkraukle (LV-002) 🏴󠁬󠁵󠁤󠁩󠁿 Flag for Diekirch (LU-DI) 🏴󠁬󠁴󠁭󠁲󠁿 Flag for Marijampolė County (LT-MR) 👩🏽‍👨🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁬󠁴󠁳󠁡󠁿 Flag for Šiauliai County (LT-SA) 🏴󠁬󠁵󠁥󠁣󠁿 Flag for Echternach (LU-EC) 🏴󠁬󠁵󠁲󠁤󠁿 Flag for Redange (LU-RD) 🏴󠁬󠁵󠁣󠁬󠁿 Flag for Clervaux (LU-CL) 🏴󠁬󠁴󠀵󠀹󠁿 Flag for Visaginas (LT-59) 🏴󠁬󠁶󠀰󠀰󠀹󠁿 Flag for Ape (LV-009) 🏴󠁬󠁶󠀰󠀰󠀸󠁿 Flag for Amata (LV-008) 🏴󠁬󠁴󠁡󠁬󠁿 Flag for Alytus County (LT-AL) 🏴󠁬󠁵󠁧󠁲󠁿 Flag for Grevenmacher (LU-GR) 🏴󠁬󠁶󠀰󠀰󠀱󠁿 Flag for Aglona (LV-001) 🏴󠁬󠁵󠁭󠁥󠁿 Flag for Mersch (LU-ME) 🏴󠁬󠁵󠁶󠁤󠁿 Flag for Vianden (LU-VD) 🏴󠁬󠁶󠀰󠀰󠀵󠁿 Flag for Aloja (LV-005) 🏴󠁬󠁢󠁪󠁬󠁿 Flag for Mount Lebanon (LB-JL) 🏴󠁬󠁴󠁫󠁵󠁿 Flag for Kaunas County (LT-KU) 🏴󠁬󠁴󠀶󠀰󠁿 Flag for Zarasai (LT-60) 🏴󠁬󠁵󠁷󠁩󠁿 Flag for Wiltz (LU-WI) 🏴󠁬󠁶󠀰󠀱󠀱󠁿 Flag for Ādaži (LV-011) 🏴󠁬󠁵󠁬󠁵󠁿 Flag for Luxembourg (LU-LU) 🏴󠁬󠁴󠁴󠁥󠁿 Flag for Telšiai County (LT-TE) 🏴󠁬󠁶󠀰󠀰󠀷󠁿 Flag for Alūksne (LV-007) 🏴󠁬󠁵󠁲󠁭󠁿 Flag for Remich (LU-RM) 🏴󠁬󠁶󠀰󠀰󠀴󠁿 Flag for Aknīste (LV-004) 🏴󠁬󠁵󠁥󠁳󠁿 Flag for Esch-sur-Alzette (LU-ES) 🏴󠁬󠁶󠀰󠀰󠀳󠁿 Flag for Aizpute (LV-003) 🏴󠁬󠁴󠁫󠁬󠁿 Flag for Klaipėda County (LT-KL) 🏴󠁬󠁶󠀰󠀲󠀷󠁿 Flag for Dundaga (LV-027) 🏴󠁬󠁶󠀰󠀴󠀰󠁿 Flag for Jaunpils (LV-040) 🏴󠁬󠁶󠀰󠀱󠀹󠁿 Flag for Burtnieki (LV-019) 🏴󠁬󠁶󠀰󠀱󠀵󠁿 Flag for Balvi (LV-015) 🏴󠁬󠁶󠀰󠀱󠀷󠁿 Flag for Beverīna (LV-017) 🏴󠁬󠁶󠀰󠀲󠀵󠁿 Flag for Daugavpils Municipality (LV-025) 🏴󠁬󠁶󠀰󠀲󠀱󠁿 Flag for Cesvaine (LV-021) 🏴󠁬󠁶󠀰󠀳󠀶󠁿 Flag for Ilūkste (LV-036) 🏴󠁬󠁶󠀰󠀵󠀰󠁿 Flag for Kuldīga (LV-050) 🏴󠁬󠁶󠀰󠀳󠀲󠁿 Flag for Grobiņa (LV-032) 🏴󠁬󠁶󠀰󠀳󠀳󠁿 Flag for Gulbene (LV-033) 🏴󠁬󠁶󠀰󠀴󠀳󠁿 Flag for Kandava (LV-043) 
🏴󠁬󠁶󠀰󠀱󠀸󠁿 Flag for Brocēni (LV-018) 🏴󠁬󠁶󠀰󠀴󠀸󠁿 Flag for Krimulda (LV-048) 🏴󠁬󠁶󠀰󠀲󠀰󠁿 Flag for Carnikava (LV-020) 🏴󠁬󠁶󠀰󠀴󠀹󠁿 Flag for Krustpils (LV-049) 👩🏾‍👨🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁬󠁶󠀰󠀲󠀶󠁿 Flag for Dobele (LV-026) 🏴󠁬󠁶󠀰󠀴󠀵󠁿 Flag for Kocēni (LV-045) 🏴󠁬󠁶󠀰󠀳󠀱󠁿 Flag for Garkalne (LV-031) 🏴󠁬󠁶󠀰󠀳󠀰󠁿 Flag for Ērgļi (LV-030) 🏴󠁬󠁶󠀰󠀲󠀸󠁿 Flag for Durbe (LV-028) 🏴󠁬󠁶󠀰󠀴󠀷󠁿 Flag for Krāslava (LV-047) 🏴󠁬󠁶󠀰󠀲󠀴󠁿 Flag for Dagda (LV-024) 🏴󠁬󠁶󠀰󠀳󠀸󠁿 Flag for Jaunjelgava (LV-038) 🏴󠁬󠁶󠀰󠀱󠀶󠁿 Flag for Bauska (LV-016) 🏴󠁬󠁶󠀰󠀱󠀴󠁿 Flag for Baltinava (LV-014) 🏴󠁬󠁶󠀰󠀴󠀲󠁿 Flag for Jēkabpils Municipality (LV-042) 🏴󠁬󠁶󠀰󠀳󠀹󠁿 Flag for Jaunpiebalga (LV-039) 🏴󠁬󠁶󠀰󠀲󠀲󠁿 Flag for Cēsis (LV-022) 🏴󠁬󠁶󠀰󠀳󠀴󠁿 Flag for Iecava (LV-034) 🏴󠁬󠁶󠀰󠀵󠀱󠁿 Flag for Ķegums (LV-051) 🏴󠁬󠁶󠀰󠀳󠀵󠁿 Flag for Ikšķile (LV-035) 🏴󠁬󠁶󠀰󠀲󠀳󠁿 Flag for Cibla (LV-023) 🏴󠁬󠁶󠀰󠀴󠀴󠁿 Flag for Kārsava (LV-044) 🏴󠁬󠁶󠀰󠀲󠀹󠁿 Flag for Engure (LV-029) 🏴󠁬󠁶󠀰󠀵󠀵󠁿 Flag for Līgatne (LV-055) 🏴󠁬󠁶󠀰󠀶󠀶󠁿 Flag for Nīca (LV-066) 🏴󠁬󠁶󠀰󠀶󠀱󠁿 Flag for Mālpils (LV-061) 🏴󠁧󠁥󠁫󠁫󠁿 Flag for Kvemo Kartli (GE-KK) 🏴󠁬󠁶󠀰󠀷󠀰󠁿 Flag for Pārgauja (LV-070) 🏴󠁬󠁶󠀰󠀵󠀳󠁿 Flag for Lielvārde (LV-053) 🏴󠁬󠁶󠀰󠀷󠀲󠁿 Flag for Pļaviņas (LV-072) 🏴󠁬󠁶󠀰󠀷󠀱󠁿 Flag for Pāvilosta (LV-071) 🏴󠁬󠁶󠀰󠀵󠀹󠁿 Flag for Madona (LV-059) 🏴󠁬󠁶󠀰󠀷󠀶󠁿 Flag for Rauna (LV-076) 🏴󠁬󠁶󠀰󠀵󠀴󠁿 Flag for Limbaži (LV-054) 🏴󠁬󠁶󠀰󠀶󠀴󠁿 Flag for Naukšēni (LV-064) 🏴󠁬󠁶󠀰󠀵󠀲󠁿 Flag for Ķekava (LV-052) 🏴󠁬󠁶󠀰󠀸󠀷󠁿 Flag for Salaspils (LV-087) 🏴󠁬󠁶󠀰󠀶󠀳󠁿 Flag for Mērsrags (LV-063) 🏴󠁬󠁶󠀰󠀶󠀸󠁿 Flag for Olaine (LV-068) 🏴󠁬󠁶󠀰󠀷󠀹󠁿 Flag for Roja (LV-079) 🏴󠁬󠁶󠀰󠀸󠀱󠁿 Flag for Rucava (LV-081) 🏴󠁬󠁶󠀰󠀸󠀲󠁿 Flag for Rugāji (LV-082) 🏴󠁬󠁶󠀰󠀶󠀷󠁿 Flag for Ogre (LV-067) 🏴󠁬󠁶󠀰󠀸󠀴󠁿 Flag for Rūjiena (LV-084) 🏴󠁬󠁶󠀰󠀸󠀹󠁿 Flag for Saulkrasti (LV-089) 🏴󠁬󠁶󠀰󠀸󠀸󠁿 Flag for Saldus (LV-088) 🏴󠁬󠁶󠀰󠀸󠀳󠁿 Flag for Rundāle (LV-083) 🏴󠁬󠁶󠀰󠀶󠀵󠁿 Flag for Nereta (LV-065) 🏴󠁬󠁶󠀰󠀶󠀹󠁿 Flag for Ozolnieki (LV-069) 🏴󠁬󠁶󠀰󠀸󠀰󠁿 Flag for Ropaži (LV-080) 🏴󠁬󠁶󠀰󠀷󠀸󠁿 Flag for Riebiņi (LV-078) 🏴󠁬󠁶󠀰󠀵󠀶󠁿 Flag for Līvāni (LV-056) 🏴󠁬󠁶󠀰󠀷󠀵󠁿 Flag for Priekuļi (LV-075) 🏴󠁬󠁶󠀰󠀵󠀸󠁿 Flag for Ludza (LV-058) 🏴󠁬󠁶󠀰󠀹󠀰󠁿 Flag for Sēja (LV-090) 🏴󠁬󠁶󠀰󠀷󠀴󠁿 Flag 
for Priekule (LV-074) 🏴󠁬󠁶󠀰󠀵󠀷󠁿 Flag for Lubāna (LV-057) 🏴󠁬󠁶󠀰󠀸󠀶󠁿 Flag for Salacgrīva (LV-086) 🏴󠁬󠁶󠀰󠀶󠀲󠁿 Flag for Mārupe (LV-062) 🏴󠁬󠁶󠀰󠀷󠀳󠁿 Flag for Preiļi (LV-073) 🏴󠁬󠁶󠀱󠀰󠀷󠁿 Flag for Viesīte (LV-107) 🏴󠁬󠁶󠀰󠀹󠀴󠁿 Flag for Smiltene (LV-094) 🏴󠁬󠁹󠁫󠁦󠁿 Flag for Kufra (LY-KF) 🏴󠁬󠁶󠁤󠁧󠁶󠁿 Flag for Daugavpils (LV-DGV) 🏴󠁬󠁶󠀰󠀹󠀹󠁿 Flag for Tukums (LV-099) 👩🏿‍👨🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁬󠁶󠁬󠁰󠁸󠁿 Flag for Liepāja (LV-LPX) 🏴󠁬󠁶󠀱󠀰󠀱󠁿 Flag for Valka (LV-101) 🏴󠁬󠁶󠀱󠀰󠀳󠁿 Flag for Vārkava (LV-103) 🏴󠁬󠁹󠁭󠁢󠁿 Flag for Murqub (LY-MB) 🏴󠁬󠁶󠁶󠁥󠁮󠁿 Flag for Ventspils (LV-VEN) 🏴󠁬󠁹󠁪󠁡󠁿 Flag for Jabal al Akhdar (LY-JA) 🏴󠁬󠁶󠁪󠁫󠁢󠁿 Flag for Jēkabpils (LV-JKB) 🏴󠁬󠁶󠀰󠀹󠀱󠁿 Flag for Sigulda (LV-091) 🏴󠁬󠁹󠁪󠁧󠁿 Flag for Jabal al Gharbi (LY-JG) 🏴󠁬󠁹󠁧󠁴󠁿 Flag for Ghat (LY-GT) 🏴󠁬󠁶󠀰󠀹󠀵󠁿 Flag for Stopiņi (LV-095) 🏴󠁬󠁶󠁲󠁩󠁸󠁿 Flag for Riga (LV-RIX) 🏴󠁬󠁹󠁤󠁲󠁿 Flag for Derna (LY-DR) 🏴󠁬󠁶󠀱󠀰󠀰󠁿 Flag for Vaiņode (LV-100) 🏴󠁬󠁶󠀱󠀰󠀲󠁿 Flag for Varakļāni (LV-102) 🏴󠁬󠁶󠁪󠁥󠁬󠁿 Flag for Jelgava (LV-JEL) 🏴󠁬󠁶󠀰󠀹󠀲󠁿 Flag for Skrīveri (LV-092) 🏴󠁬󠁶󠀰󠀹󠀷󠁿 Flag for Talsi (LV-097) 🏴󠁬󠁶󠁶󠁭󠁲󠁿 Flag for Valmiera (LV-VMR) 🏴󠁬󠁹󠁢󠁡󠁿 Flag for Benghazi (LY-BA) 🏴󠁬󠁶󠁲󠁥󠁺󠁿 Flag for Rēzekne (LV-REZ) 🏴󠁬󠁶󠀰󠀹󠀳󠁿 Flag for Skrunda (LV-093) 🏴󠁬󠁶󠀱󠀱󠀰󠁿 Flag for Zilupe (LV-110) 🏴󠁬󠁶󠀰󠀹󠀶󠁿 Flag for Strenči (LV-096) 🏴󠁬󠁹󠁪󠁵󠁿 Flag for Jufra (LY-JU) 🏴󠁬󠁶󠀱󠀰󠀴󠁿 Flag for Vecpiebalga (LV-104) 🏴󠁬󠁶󠀱󠀰󠀵󠁿 Flag for Vecumnieki (LV-105) 🏴󠁬󠁶󠀱󠀰󠀸󠁿 Flag for Viļaka (LV-108) 🏴󠁬󠁶󠁪󠁵󠁲󠁿 Flag for Jūrmala (LV-JUR) 🏴󠁬󠁶󠀱󠀰󠀹󠁿 Flag for Viļāni (LV-109) 🏴󠁬󠁶󠀰󠀹󠀸󠁿 Flag for Tērvete (LV-098) 🏴󠁭󠁡󠀰󠀸󠁿 Flag for Grand Casablanca (MA-08) 🏴󠁬󠁹󠁭󠁪󠁿 Flag for Marj (LY-MJ) 🏴󠁬󠁹󠁷󠁡󠁿 Flag for Al Wahat (LY-WA) 🏴󠁭󠁣󠁭󠁣󠁿 Flag for Monte Carlo (MC-MC) 🏴󠁭󠁡󠀱󠀴󠁿 Flag for Guelmim-Es Semara (MA-14) 🏴󠁬󠁹󠁺󠁡󠁿 Flag for Zawiya (LY-ZA) 🏴󠁭󠁡󠀰󠀲󠁿 Flag for Gharb-Chrarda-Béni Hssen (MA-02) 🏴󠁭󠁡󠀱󠀱󠁿 Flag for Marrakesh-Tensift-El Haouz (MA-11) 🏴󠁭󠁡󠀱󠀰󠁿 Flag for Doukkala-Abda (MA-10) 👩🏽‍👩🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁭󠁡󠀰󠀷󠁿 Flag for Rabat-Salé-Zemmour-Zaer (MA-07) 🏴󠁭󠁡󠀱󠀶󠁿 Flag for Oued Ed-Dahab-Lagouira (MA-16) 
🏴󠁬󠁹󠁮󠁬󠁿 Flag for Nalut (LY-NL) 🏴󠁬󠁹󠁳󠁢󠁿 Flag for Sabha (LY-SB) 🏴󠁭󠁡󠀰󠀳󠁿 Flag for Taza-Al Hoceima-Taounate (MA-03) 🏴󠁭󠁣󠁪󠁥󠁿 Flag for Jardin Exotique de Monaco (MC-JE) 🏴󠁬󠁹󠁷󠁳󠁿 Flag for Wadi al Shatii (LY-WS) 🏴󠁭󠁣󠁬󠁡󠁿 Flag for Larvotto (MC-LA) 🏴󠁬󠁹󠁮󠁱󠁿 Flag for Nuqat al Khams (LY-NQ) 🏴󠁭󠁣󠁭󠁡󠁿 Flag for Malbousquet (MC-MA) 🏴󠁭󠁡󠀱󠀲󠁿 Flag for Tadla-Azilal (MA-12) 🏴󠁭󠁣󠁣󠁯󠁿 Flag for La Condamine (MC-CO) 🏴󠁭󠁣󠁭󠁯󠁿 Flag for Monaco-Ville (MC-MO) 🏴󠁭󠁡󠀰󠀹󠁿 Flag for Chaouia-Ouardigha (MA-09) 🏴󠁭󠁡󠀰󠀱󠁿 Flag for Tangier-Tétouan (MA-01) 🏴󠁭󠁣󠁭󠁧󠁿 Flag for Moneghetti (MC-MG) 🏴󠁬󠁹󠁭󠁱󠁿 Flag for Murzuq (LY-MQ) 🏴󠁭󠁡󠀰󠀶󠁿 Flag for Meknès-Tafilalet (MA-06) 🏴󠁭󠁣󠁦󠁯󠁿 Flag for Fontvieille (MC-FO) 🏴󠁬󠁹󠁷󠁤󠁿 Flag for Wadi al Hayaa (LY-WD) 🏴󠁭󠁣󠁣󠁬󠁿 Flag for La Colle (MC-CL) 🏴󠁬󠁹󠁳󠁲󠁿 Flag for Sirte (LY-SR) 🏴󠁬󠁹󠁭󠁩󠁿 Flag for Misrata (LY-MI) 🏴󠁭󠁡󠀰󠀵󠁿 Flag for Fès-Boulemane (MA-05) 🏴󠁬󠁹󠁴󠁢󠁿 Flag for Tripoli (LY-TB) 🏴󠁭󠁣󠁧󠁡󠁿 Flag for La Gare (MC-GA) 👩🏾‍👩🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁭󠁤󠁥󠁤󠁿 Flag for Edineț (MD-ED) 🏴󠁭󠁤󠁨󠁩󠁿 Flag for Hîncești (MD-HI) 🏴󠁭󠁤󠁦󠁡󠁿 Flag for Fălești (MD-FA) 🏴󠁭󠁤󠁣󠁲󠁿 Flag for Criuleni (MD-CR) 🏴󠁭󠁤󠁳󠁩󠁿 Flag for Sîngerei (MD-SI) 🏴󠁭󠁤󠁳󠁯󠁿 Flag for Soroca (MD-SO) 🏴󠁭󠁤󠁣󠁴󠁿 Flag for Cantemir (MD-CT) 🏴󠁭󠁤󠁲󠁥󠁿 Flag for Rezina (MD-RE) 🏴󠁭󠁤󠁳󠁤󠁿 Flag for Șoldănești (MD-SD) 🏴󠁭󠁤󠁢󠁲󠁿 Flag for Briceni (MD-BR) 🏴󠁭󠁣󠁶󠁲󠁿 Flag for Vallon de la Rousse (MC-VR) 🏴󠁭󠁤󠁢󠁡󠁿 Flag for Bălţi (MD-BA) 🏴󠁭󠁤󠁤󠁵󠁿 Flag for Dubăsari (MD-DU) 🏴󠁭󠁤󠁣󠁬󠁿 Flag for Călărași (MD-CL) 🏴󠁭󠁣󠁳󠁰󠁿 Flag for Spélugues (MC-SP) 🏴󠁭󠁤󠁣󠁡󠁿 Flag for Cahul (MD-CA) 🏴󠁭󠁤󠁩󠁡󠁿 Flag for Ialoveni (MD-IA) 🏴󠁭󠁤󠁯󠁲󠁿 Flag for Orhei (MD-OR) 🏴󠁭󠁤󠁤󠁲󠁿 Flag for Drochia (MD-DR) 🏴󠁭󠁤󠁧󠁡󠁿 Flag for Gagauzia (MD-GA) 🏴󠁭󠁤󠁣󠁭󠁿 Flag for Cimișlia (MD-CM) 🏴󠁭󠁤󠁯󠁣󠁿 Flag for Ocniţa (MD-OC) 🏴󠁭󠁤󠁢󠁳󠁿 Flag for Basarabeasca (MD-BS) 🏴󠁭󠁤󠁳󠁴󠁿 Flag for Strășeni (MD-ST) 🏴󠁭󠁤󠁡󠁮󠁿 Flag for Anenii Noi (MD-AN) 🏴󠁭󠁣󠁭󠁵󠁿 Flag for Moulins (MC-MU) 🏴󠁭󠁤󠁢󠁤󠁿 Flag for Bender (MD-BD) 🏴󠁭󠁤󠁧󠁬󠁿 Flag for Glodeni (MD-GL) 🏴󠁭󠁣󠁳󠁯󠁿 Flag for La Source (MC-SO) 🏴󠁭󠁤󠁣󠁵󠁿 Flag for Chișinău (MD-CU) 🏴󠁭󠁤󠁤󠁯󠁿 Flag for Dondușeni 
(MD-DO) 🏴󠁭󠁤󠁦󠁬󠁿 Flag for Florești (MD-FL) 🏴󠁭󠁣󠁰󠁨󠁿 Flag for Port Hercules (MC-PH) 🏴󠁭󠁤󠁮󠁩󠁿 Flag for Nisporeni (MD-NI) 🏴󠁭󠁤󠁲󠁩󠁿 Flag for Rîșcani (MD-RI) 🏴󠁭󠁤󠁬󠁥󠁿 Flag for Leova (MD-LE) 🏴󠁭󠁤󠁳󠁶󠁿 Flag for Ştefan Vodă (MD-SV) 🏴󠁭󠁤󠁵󠁮󠁿 Flag for Ungheni (MD-UN) 🏴󠁭󠁧󠁡󠁿 Flag for Toamasina (MG-A) 🏴󠁭󠁧󠁴󠁿 Flag for Antananarivo (MG-T) 🏴󠁭󠁥󠀰󠀶󠁿 Flag for Cetinje (ME-06) 🏴󠁭󠁫󠀰󠀵󠁿 Flag for Bogdanci (MK-05) 🏴󠁭󠁥󠀲󠀰󠁿 Flag for Ulcinj (ME-20) 🏴󠁭󠁥󠀰󠀹󠁿 Flag for Kolašin (ME-09) 🏴󠁭󠁫󠀰󠀷󠁿 Flag for Bosilovo (MK-07) 🏴󠁭󠁥󠀱󠀴󠁿 Flag for Pljevlja (ME-14) 🏴󠁭󠁤󠁴󠁥󠁿 Flag for Telenești (MD-TE) 🏴󠁭󠁫󠀰󠀶󠁿 Flag for Bogovinje (MK-06) 🏴󠁭󠁥󠀲󠀱󠁿 Flag for Žabljak (ME-21) 🏴󠁭󠁥󠀰󠀸󠁿 Flag for Herceg Novi (ME-08) 🏴󠁭󠁥󠀲󠀳󠁿 Flag for Petnjica (ME-23) 🏴󠁭󠁥󠀱󠀷󠁿 Flag for Rožaje (ME-17) 🏴󠁭󠁥󠀰󠀵󠁿 Flag for Budva (ME-05) 🏴󠁭󠁥󠀰󠀲󠁿 Flag for Bar (ME-02) 🏴󠁭󠁫󠀰󠀳󠁿 Flag for Berovo (MK-03) 🏴󠁭󠁥󠀱󠀹󠁿 Flag for Tivat (ME-19) 🏴󠁭󠁥󠀱󠀵󠁿 Flag for Plužine (ME-15) 🏴󠁭󠁥󠀱󠀰󠁿 Flag for Kotor (ME-10) 🏴󠁭󠁨󠁬󠁿 Flag for Ralik Chain (MH-L) 🏴󠁭󠁥󠀰󠀷󠁿 Flag for Danilovgrad (ME-07) 🏴󠁭󠁥󠀱󠀳󠁿 Flag for Plav (ME-13) 🏴󠁭󠁫󠀰󠀴󠁿 Flag for Bitola (MK-04) 🏴󠁭󠁥󠀰󠀴󠁿 Flag for Bijelo Polje (ME-04) 🏴󠁭󠁥󠀰󠀱󠁿 Flag for Andrijevica (ME-01) 👩🏿‍👩🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁭󠁥󠀱󠀲󠁿 Flag for Nikšić (ME-12) 🏴󠁭󠁤󠁴󠁡󠁿 Flag for Taraclia (MD-TA) 🏴󠁭󠁥󠀱󠀱󠁿 Flag for Mojkovac (ME-11) 🏴󠁭󠁧󠁭󠁿 Flag for Mahajanga (MG-M) 🏴󠁭󠁥󠀲󠀲󠁿 Flag for Gusinje (ME-22) 🏴󠁭󠁧󠁦󠁿 Flag for Fianarantsoa (MG-F) 🏴󠁭󠁥󠀱󠀸󠁿 Flag for Šavnik (ME-18) 🏴󠁭󠁥󠀱󠀶󠁿 Flag for Podgorica (ME-16) 🏴󠁭󠁧󠁵󠁿 Flag for Toliara (MG-U) 🏴󠁭󠁧󠁤󠁿 Flag for Antsiranana (MG-D) 🏴󠁭󠁫󠀴󠀳󠁿 Flag for Kratovo (MK-43) 🏴󠁭󠁫󠀴󠀴󠁿 Flag for Kriva Palanka (MK-44) 🏴󠁭󠁫󠀵󠀲󠁿 Flag for Makedonski Brod (MK-52) 🏴󠁭󠁫󠀳󠀵󠁿 Flag for Jegunovce (MK-35) 🏴󠁭󠁫󠀴󠀹󠁿 Flag for Lozovo (MK-49) 🏴󠁭󠁫󠀴󠀷󠁿 Flag for Kumanovo (MK-47) 🏴󠁭󠁫󠀱󠀲󠁿 Flag for Vevčani (MK-12) 🏴󠁭󠁫󠀲󠀴󠁿 Flag for Demir Kapija (MK-24) 🏴󠁭󠁫󠀱󠀱󠁿 Flag for Vasilevo (MK-11) 🏴󠁭󠁫󠀳󠀰󠁿 Flag for Želino (MK-30) 🏴󠁭󠁫󠀳󠀶󠁿 Flag for Kavadarci (MK-36) 🏴󠁭󠁫󠀳󠀲󠁿 Flag for Zelenikovo (MK-32) 🏴󠁭󠁫󠀴󠀱󠁿 Flag for Konče (MK-41) 🏴󠁭󠁫󠀱󠀴󠁿 Flag for Vinica (MK-14) 🏴󠁭󠁫󠀱󠀰󠁿 Flag for Valandovo 
(MK-10) 🏴󠁭󠁫󠀵󠀵󠁿 Flag for Novaci (MK-55) 🏴󠁭󠁫󠀵󠀶󠁿 Flag for Novo Selo (MK-56) 🏴󠁭󠁫󠀳󠀴󠁿 Flag for Ilinden (MK-34) 🏴󠁭󠁫󠀵󠀱󠁿 Flag for Makedonska Kamenica (MK-51) 🏴󠁭󠁫󠀱󠀶󠁿 Flag for Vrapčište (MK-16) 🏴󠁭󠁫󠀰󠀸󠁿 Flag for Brvenica (MK-08) 🏴󠁭󠁫󠀲󠀰󠁿 Flag for Gradsko (MK-20) 🏴󠁭󠁫󠀵󠀰󠁿 Flag for Mavrovo and Rostuša (MK-50) 🏴󠁭󠁫󠀲󠀲󠁿 Flag for Debarca (MK-22) 🏴󠁭󠁫󠀱󠀹󠁿 Flag for Gostivar (MK-19) 🏴󠁭󠁫󠀵󠀳󠁿 Flag for Mogila (MK-53) 🏴󠁭󠁫󠀴󠀸󠁿 Flag for Lipkovo (MK-48) 🏴󠁭󠁫󠀳󠀷󠁿 Flag for Karbinci (MK-37) 🏴󠁭󠁫󠀳󠀳󠁿 Flag for Zrnovci (MK-33) 🏴󠁭󠁫󠀵󠀴󠁿 Flag for Negotino (MK-54) 🏴󠁭󠁫󠀴󠀰󠁿 Flag for Kičevo (MK-40) 🏴󠁭󠁫󠀲󠀱󠁿 Flag for Debar (MK-21) 🏴󠁭󠁫󠀱󠀳󠁿 Flag for Veles (MK-13) 🏴󠁭󠁫󠀲󠀶󠁿 Flag for Dojran (MK-26) 🏴󠁭󠁫󠀱󠀸󠁿 Flag for Gevgelija (MK-18) 🏴󠁭󠁫󠀴󠀲󠁿 Flag for Kočani (MK-42) 🏴󠁭󠁫󠀴󠀵󠁿 Flag for Krivogaštani (MK-45) 🏴󠁭󠁫󠀲󠀳󠁿 Flag for Delčevo (MK-23) 🏴󠁭󠁫󠀴󠀶󠁿 Flag for Kruševo (MK-46) 🏴󠁭󠁫󠀸󠀲󠁿 Flag for Čučer-Sandevo (MK-82) 🏴󠁭󠁫󠀶󠀲󠁿 Flag for Prilep (MK-62) 🏴󠁭󠁫󠀷󠀸󠁿 Flag for Centar Župa (MK-78) 🏴󠁭󠁭󠀰󠀴󠁿 Flag for Mandalay (MM-04) 🏴󠁭󠁬󠀴󠁿 Flag for Ségou (ML-4) 🏴󠁭󠁫󠀵󠀹󠁿 Flag for Petrovec (MK-59) 🏴󠁭󠁫󠀸󠀱󠁿 Flag for Češinovo-Obleševo (MK-81) 🏴󠁭󠁬󠀸󠁿 Flag for Kidal (ML-8) 🏴󠁭󠁭󠀰󠀲󠁿 Flag for Bago (MM-02) 🏴󠁭󠁫󠀷󠀲󠁿 Flag for Struga (MK-72) 🏴󠁭󠁫󠀷󠀵󠁿 Flag for Tearce (MK-75) 🏴󠁭󠁫󠀷󠀴󠁿 Flag for Studeničani (MK-74) 🏴󠁭󠁫󠀵󠀸󠁿 Flag for Ohrid (MK-58) 🏴󠁭󠁫󠀶󠀹󠁿 Flag for Sveti Nikole (MK-69) 🏴󠁭󠁫󠀷󠀳󠁿 Flag for Strumica (MK-73) 🏴󠁭󠁬󠀳󠁿 Flag for Sikasso (ML-3) 🏴󠁭󠁭󠀱󠀱󠁿 Flag for Kachin (MM-11) 🏴󠁭󠁫󠀶󠀶󠁿 Flag for Resen (MK-66) 🏴󠁭󠁬󠁢󠁫󠁯󠁿 Flag for Bamako (ML-BKO) 🏴󠁭󠁭󠀰󠀳󠁿 Flag for Magway (MM-03) 🏴󠁭󠁫󠀷󠀰󠁿 Flag for Sopište (MK-70) 🏴󠁭󠁫󠀷󠀱󠁿 Flag for Staro Nagoričane (MK-71) 🏴󠁭󠁭󠀰󠀷󠁿 Flag for Ayeyarwady (MM-07) 🏴󠁭󠁬󠀷󠁿 Flag for Gao (ML-7) 🏴󠁭󠁬󠀵󠁿 Flag for Mopti (ML-5) 🏴󠁭󠁫󠀸󠀳󠁿 Flag for Štip (MK-83) 🏴󠁭󠁭󠀱󠀲󠁿 Flag for Kayah (MM-12) 🏴󠁭󠁭󠀰󠀵󠁿 Flag for Tanintharyi (MM-05) 🏴󠁭󠁬󠀲󠁿 Flag for Koulikoro (ML-2) 🏴󠁭󠁫󠀶󠀳󠁿 Flag for Probištip (MK-63) 🏴󠁭󠁫󠀶󠀰󠁿 Flag for Pehčevo (MK-60) 🏴󠁭󠁭󠀰󠀱󠁿 Flag for Sagaing (MM-01) 🏴󠁭󠁫󠀸󠀰󠁿 Flag for Čaška (MK-80) 🏴󠁭󠁫󠀶󠀵󠁿 Flag for Rankovce (MK-65) 🏴󠁭󠁭󠀰󠀶󠁿 Flag for Yangon (MM-06) 🏴󠁭󠁫󠀷󠀶󠁿 Flag for Tetovo (MK-76) 🏴󠁭󠁫󠀶󠀷󠁿 Flag for Rosoman 
(MK-67) 🏴󠁭󠁲󠀰󠀳󠁿 Flag for Assaba (MR-03) 🏴󠁭󠁭󠀱󠀷󠁿 Flag for Shan (MM-17) 🏴󠁭󠁭󠀱󠀶󠁿 Flag for Rakhine (MM-16) 🏴󠁭󠁮󠀰󠀴󠀱󠁿 Flag for Khövsgöl (MN-041) 🏴󠁭󠁮󠀰󠀷󠀱󠁿 Flag for Bayan-Ölgii (MN-071) 🏴󠁭󠁮󠀰󠀶󠀹󠁿 Flag for Bayankhongor (MN-069) 🏴󠁭󠁮󠀰󠀶󠀱󠁿 Flag for Dornod (MN-061) 🏴󠁭󠁮󠀰󠀴󠀹󠁿 Flag for Selenge (MN-049) 🏴󠁭󠁮󠀱󠁿 Flag for Ulaanbaatar (MN-1) 🏴󠁭󠁮󠀰󠀳󠀷󠁿 Flag for Darkhan-Uul (MN-037) 🏴󠁭󠁮󠀰󠀴󠀷󠁿 Flag for Töv (MN-047) 🏴󠁭󠁭󠀱󠀵󠁿 Flag for Mon (MM-15) 🏴󠁭󠁲󠀰󠀶󠁿 Flag for Trarza (MR-06) 🏴󠁭󠁮󠀰󠀵󠀱󠁿 Flag for Sükhbaatar (MN-051) 🏴󠁭󠁲󠀰󠀴󠁿 Flag for Gorgol (MR-04) 🏴󠁭󠁮󠀰󠀵󠀵󠁿 Flag for Övörkhangai (MN-055) 🏴󠁭󠁭󠀱󠀴󠁿 Flag for Chin (MM-14) 🏴󠁭󠁮󠀰󠀶󠀷󠁿 Flag for Bulgan (MN-067) 🏴󠁭󠁮󠀰󠀵󠀷󠁿 Flag for Zavkhan (MN-057) 🏴󠁭󠁮󠀰󠀶󠀳󠁿 Flag for Dornogovi (MN-063) 🏴󠁭󠁮󠀰󠀵󠀳󠁿 Flag for Ömnögovi (MN-053) 🏴󠁭󠁭󠀱󠀳󠁿 Flag for Kayin (MM-13) 🏴󠁭󠁮󠀰󠀶󠀵󠁿 Flag for Govi-Altai (MN-065) 🏴󠁭󠁲󠀱󠀱󠁿 Flag for Tiris Zemmour (MR-11) 🏴󠁭󠁮󠀰󠀵󠀹󠁿 Flag for Dundgovi (MN-059) 🏴󠁭󠁮󠀰󠀷󠀳󠁿 Flag for Arkhangai (MN-073) 🏴󠁭󠁲󠀰󠀹󠁿 Flag for Tagant (MR-09) 🏴󠁭󠁮󠀰󠀴󠀳󠁿 Flag for Khovd (MN-043) 🏴󠁭󠁮󠀰󠀴󠀶󠁿 Flag for Uvs (MN-046) 🏴󠁭󠁮󠀰󠀶󠀴󠁿 Flag for Govisümber (MN-064) 🏴󠁭󠁲󠀰󠀵󠁿 Flag for Brakna (MR-05) 🏴󠁭󠁲󠀰󠀸󠁿 Flag for Dakhlet Nouadhibou (MR-08) 🏴󠁭󠁲󠀰󠀱󠁿 Flag for Hodh Ech Chargui (MR-01) 🏴󠁭󠁮󠀰󠀳󠀵󠁿 Flag for Orkhon (MN-035) 🏴󠁭󠁲󠀰󠀲󠁿 Flag for Hodh El Gharbi (MR-02) 🏴󠁭󠁭󠀱󠀸󠁿 Flag for Naypyidaw (MM-18) 🏴󠁭󠁲󠀰󠀷󠁿 Flag for Adrar (MR-07) 🏴󠁭󠁲󠀱󠀲󠁿 Flag for Inchiri (MR-12) 🏴󠁭󠁴󠀱󠀹󠁿 Flag for Iklin (MT-19) 🏴󠁭󠁴󠀱󠀴󠁿 Flag for Għarb (MT-14) 🏴󠁭󠁴󠀳󠀳󠁿 Flag for Mqabba (MT-33) 🏴󠁭󠁴󠀲󠀲󠁿 Flag for Kerċem (MT-22) 🏴󠁭󠁴󠀱󠀶󠁿 Flag for Għasri (MT-16) 🏴󠁭󠁴󠀲󠀴󠁿 Flag for Lija (MT-24) 🏴󠁭󠁴󠀰󠀵󠁿 Flag for Birżebbuġa (MT-05) 🏴󠁭󠁴󠀰󠀴󠁿 Flag for Birkirkara (MT-04) 🏴󠁭󠁴󠀳󠀱󠁿 Flag for Mġarr (MT-31) 🏴󠁭󠁴󠀰󠀲󠁿 Flag for Balzan (MT-02) 🏴󠁭󠁴󠀳󠀶󠁿 Flag for Munxar (MT-36) 🏴󠁭󠁴󠀱󠀳󠁿 Flag for Għajnsielem (MT-13) 🏴󠁭󠁴󠀳󠀸󠁿 Flag for Naxxar (MT-38) 🏴󠁭󠁴󠀰󠀹󠁿 Flag for Floriana (MT-09) 🏴󠁭󠁴󠀲󠀶󠁿 Flag for Marsa (MT-26) 🏴󠁭󠁴󠀰󠀷󠁿 Flag for Dingli (MT-07) 🏴󠁭󠁴󠀱󠀱󠁿 Flag for Gudja (MT-11) 🏴󠁭󠁴󠀲󠀳󠁿 Flag for Kirkop (MT-23) 🏴󠁭󠁴󠀲󠀷󠁿 Flag for Marsaskala (MT-27) 🏴󠁭󠁴󠀳󠀹󠁿 Flag for Paola (MT-39) 🏴󠁭󠁴󠀱󠀰󠁿 Flag for Fontana (MT-10) 🏴󠁭󠁴󠀳󠀴󠁿 Flag for Msida (MT-34) 
🏴󠁭󠁴󠀳󠀷󠁿 Flag for Nadur (MT-37) 🏴󠁭󠁴󠀳󠀲󠁿 Flag for Mosta (MT-32) 🏴󠁭󠁴󠀳󠀵󠁿 Flag for Imtarfa (MT-35) 🏴󠁭󠁴󠀰󠀶󠁿 Flag for Cospicua (MT-06) 🏴󠁭󠁴󠀰󠀳󠁿 Flag for Birgu (MT-03) 🏴󠁭󠁲󠀱󠀴󠁿 Flag for Nouakchott Nord (MR-14) 🏴󠁭󠁴󠀱󠀲󠁿 Flag for Gżira (MT-12) 🏴󠁭󠁴󠀳󠀰󠁿 Flag for Mellieħa (MT-30) 🏴󠁭󠁴󠀱󠀷󠁿 Flag for Għaxaq (MT-17) 🏴󠁭󠁴󠀱󠀸󠁿 Flag for Ħamrun (MT-18) 🏴󠁭󠁴󠀰󠀸󠁿 Flag for Fgura (MT-08) 🏴󠁭󠁴󠀰󠀱󠁿 Flag for Attard (MT-01) 🏴󠁭󠁴󠀱󠀵󠁿 Flag for Għargħur (MT-15) 🏴󠁭󠁴󠀲󠀱󠁿 Flag for Kalkara (MT-21) 🏴󠁭󠁲󠀱󠀵󠁿 Flag for Nouakchott Sud (MR-15) 🏴󠁭󠁴󠀲󠀸󠁿 Flag for Marsaxlokk (MT-28) 🏴󠁭󠁴󠀴󠀵󠁿 Flag for Victoria (MT-45) 🏴󠁭󠁴󠀴󠀲󠁿 Flag for Qala (MT-42) 🏴󠁭󠁴󠀶󠀴󠁿 Flag for Żabbar (MT-64) 🏴󠁭󠁵󠁡󠁧󠁿 Flag for Agaléga (MU-AG) 🏴󠁭󠁴󠀵󠀸󠁿 Flag for Ta’ Xbiex (MT-58) 🏴󠁭󠁴󠀴󠀱󠁿 Flag for Pietà (MT-41) 🏴󠁭󠁴󠀵󠀲󠁿 Flag for Sannat (MT-52) 🏴󠁭󠁵󠁰󠁬󠁿 Flag for Port Louis District (MU-PL) 🏴󠁭󠁴󠀶󠀱󠁿 Flag for Xagħra (MT-61) 🏴󠁭󠁵󠁢󠁬󠁿 Flag for Rivière Noire (MU-BL) 🏴󠁭󠁴󠀵󠀶󠁿 Flag for Sliema (MT-56) 🏴󠁭󠁴󠀴󠀷󠁿 Flag for Safi (MT-47) 🏴󠁭󠁵󠁦󠁬󠁿 Flag for Flacq (MU-FL) 🏴󠁭󠁴󠀴󠀰󠁿 Flag for Pembroke (MT-40) 🏴󠁭󠁴󠀵󠀷󠁿 Flag for Swieqi (MT-57) 🏴󠁭󠁵󠁣󠁵󠁿 Flag for Curepipe (MU-CU) 🏴󠁭󠁴󠀶󠀸󠁿 Flag for Żurrieq (MT-68) 🏴󠁭󠁴󠀴󠀹󠁿 Flag for San Ġwann (MT-49) 🏴󠁭󠁵󠁧󠁰󠁿 Flag for Grand Port (MU-GP) 🏴󠁭󠁵󠁣󠁣󠁿 Flag for Cargados Carajos (MU-CC) 🏴󠁭󠁴󠀴󠀴󠁿 Flag for Qrendi (MT-44) 🏴󠁭󠁴󠀶󠀰󠁿 Flag for Valletta (MT-60) 🏴󠁭󠁵󠁰󠁡󠁿 Flag for Pamplemousses (MU-PA) 🏴󠁭󠁴󠀴󠀳󠁿 Flag for Qormi (MT-43) 🏴󠁭󠁵󠁰󠁵󠁿 Flag for Port Louis (MU-PU) 🏴󠁭󠁴󠀵󠀹󠁿 Flag for Tarxien (MT-59) 🏴󠁭󠁴󠀶󠀵󠁿 Flag for Żebbuġ Gozo (MT-65) 🏴󠁭󠁴󠀵󠀰󠁿 Flag for Saint Lawrence (MT-50) 🏴󠁭󠁴󠀶󠀷󠁿 Flag for Żejtun (MT-67) 🏴󠁭󠁴󠀵󠀱󠁿 Flag for St. 
Paul’s Bay (MT-51) 🏴󠁭󠁴󠀵󠀳󠁿 Flag for Santa Luċija (MT-53) 🏴󠁭󠁴󠀶󠀶󠁿 Flag for Żebbuġ (MT-66) 🏴󠁭󠁴󠀴󠀶󠁿 Flag for Rabat (MT-46) 🏴󠁭󠁴󠀵󠀵󠁿 Flag for Siġġiewi (MT-55) 👩🏽‍👩🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁭󠁴󠀵󠀴󠁿 Flag for Santa Venera (MT-54) 🏴󠁭󠁴󠀶󠀳󠁿 Flag for Xgħajra (MT-63) 🏴󠁭󠁵󠁭󠁯󠁿 Flag for Moka (MU-MO) 🏴󠁭󠁸󠁭󠁩󠁣󠁿 Flag for Michoacán (MX-MIC) 🏴󠁭󠁷󠁮󠁿 Flag for Northern (MW-N) 🏴󠁭󠁶󠁵󠁮󠁿 Flag for Upper North Province (MV-UN) 🏴󠁭󠁸󠁣󠁯󠁬󠁿 Flag for Colima (MX-COL) 🏴󠁭󠁵󠁲󠁯󠁿 Flag for Rodrigues (MU-RO) 🏴󠁭󠁸󠁧󠁵󠁡󠁿 Flag for Guanajuato (MX-GUA) 🏴󠁭󠁸󠁣󠁭󠁸󠁿 Flag for Ciudad de Mexico (MX-CMX) 🏴󠁭󠁸󠁰󠁵󠁥󠁿 Flag for Puebla (MX-PUE) 🏴󠁭󠁵󠁱󠁢󠁿 Flag for Quatre Bornes (MU-QB) 🏴󠁭󠁸󠁯󠁡󠁸󠁿 Flag for Oaxaca (MX-OAX) 🏴󠁭󠁷󠁣󠁿 Flag for Central (MW-C) 🏴󠁭󠁵󠁳󠁡󠁿 Flag for Savanne (MU-SA) 🏴󠁭󠁸󠁭󠁯󠁲󠁿 Flag for Morelos (MX-MOR) 🏴󠁭󠁸󠁨󠁩󠁤󠁿 Flag for Hidalgo (MX-HID) 🏴󠁭󠁸󠁡󠁧󠁵󠁿 Flag for Aguascalientes (MX-AGU) 🏴󠁭󠁸󠁣󠁡󠁭󠁿 Flag for Campeche (MX-CAM) 🏴󠁭󠁸󠁮󠁬󠁥󠁿 Flag for Nuevo León (MX-NLE) 🏴󠁭󠁶󠁭󠁬󠁥󠁿 Flag for Malé (MV-MLE) 🏴󠁭󠁸󠁧󠁲󠁯󠁿 Flag for Guerrero (MX-GRO) 🏴󠁭󠁵󠁶󠁰󠁿 Flag for Vacoas-Phoenix (MU-VP) 👨🏻‍👨🏻‍👦🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁭󠁶󠁮󠁣󠁿 Flag for North Central Province (MV-NC) 🏴󠁭󠁸󠁭󠁥󠁸󠁿 Flag for Mexico State (MX-MEX) 🏴󠁭󠁵󠁰󠁷󠁿 Flag for Plaines Wilhems (MU-PW) 🏴󠁭󠁶󠁣󠁥󠁿 Flag for Central Province (MV-CE) 🏴󠁭󠁸󠁣󠁯󠁡󠁿 Flag for Coahuila (MX-COA) 🏴󠁭󠁶󠁳󠁵󠁿 Flag for South Province (MV-SU) 🏴󠁭󠁸󠁣󠁨󠁰󠁿 Flag for Chiapas (MX-CHP) 🏴󠁭󠁷󠁳󠁿 Flag for Southern (MW-S) 🏴󠁭󠁺󠁳󠁿 Flag for Sofala (MZ-S) 🏴󠁭󠁹󠀰󠀹󠁿 Flag for Perlis (MY-09) 🏴󠁭󠁸󠁶󠁥󠁲󠁿 Flag for Veracruz (MX-VER) 🏴󠁭󠁹󠀱󠀳󠁿 Flag for Sarawak (MY-13) 🏴󠁭󠁹󠀰󠀳󠁿 Flag for Kelantan (MY-03) 🏴󠁮󠁡󠁣󠁡󠁿 Flag for Zambezi (NA-CA) 🏴󠁭󠁺󠁢󠁿 Flag for Manica (MZ-B) 🏴󠁭󠁹󠀱󠀵󠁿 Flag for Labuan (MY-15) 🏴󠁭󠁺󠁰󠁿 Flag for Cabo Delgado (MZ-P) 🏴󠁮󠁡󠁨󠁡󠁿 Flag for Hardap (NA-HA) 🏴󠁭󠁺󠁴󠁿 Flag for Tete (MZ-T) 🏴󠁭󠁹󠀰󠀲󠁿 Flag for Kedah (MY-02) 🏴󠁭󠁹󠀰󠀶󠁿 Flag for Pahang (MY-06) 🏴󠁭󠁹󠀰󠀷󠁿 Flag for Penang (MY-07) 🏴󠁭󠁹󠀰󠀸󠁿 Flag for Perak (MY-08) 🏴󠁭󠁺󠁬󠁿 Flag for Maputo Province (MZ-L) 🏴󠁢󠁲󠁧󠁯󠁿 Flag for Goiás (BR-GO) 
🏴󠁭󠁹󠀱󠀱󠁿 Flag for Terengganu (MY-11) 🏴󠁭󠁺󠁩󠁿 Flag for Inhambane (MZ-I) 🏴󠁭󠁹󠀰󠀴󠁿 Flag for Malacca (MY-04) 🏴󠁮󠁡󠁥󠁲󠁿 Flag for Erongo (NA-ER) 🏴󠁭󠁸󠁴󠁬󠁡󠁿 Flag for Tlaxcala (MX-TLA) 🏴󠁭󠁹󠀰󠀵󠁿 Flag for Negeri Sembilan (MY-05) 🏴󠁭󠁸󠁺󠁡󠁣󠁿 Flag for Zacatecas (MX-ZAC) 🏴󠁭󠁸󠁴󠁡󠁭󠁿 Flag for Tamaulipas (MX-TAM) 🏴󠁭󠁺󠁡󠁿 Flag for Niassa (MZ-A) 🏴󠁭󠁺󠁭󠁰󠁭󠁿 Flag for Maputo (MZ-MPM) 🏴󠁭󠁺󠁮󠁿 Flag for Nampula (MZ-N) 🏴󠁭󠁹󠀱󠀶󠁿 Flag for Putrajaya (MY-16) 🏴󠁭󠁸󠁳󠁩󠁮󠁿 Flag for Sinaloa (MX-SIN) 🏴󠁭󠁸󠁹󠁵󠁣󠁿 Flag for Yucatán (MX-YUC) 🏴󠁭󠁹󠀱󠀲󠁿 Flag for Sabah (MY-12) 👩🏼‍👩🏼‍👧🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁭󠁺󠁱󠁿 Flag for Zambezia (MZ-Q) 🏴󠁭󠁸󠁱󠁵󠁥󠁿 Flag for Querétaro (MX-QUE) 🏴󠁭󠁺󠁧󠁿 Flag for Gaza (MZ-G) 🏴󠁮󠁡󠁯󠁤󠁿 Flag for Otjozondjupa (NA-OD) 🏴󠁮󠁥󠀴󠁿 Flag for Maradi (NE-4) 🏴󠁮󠁡󠁫󠁵󠁿 Flag for Kunene (NA-KU) 🏴󠁮󠁧󠁡󠁫󠁿 Flag for Akwa Ibom (NG-AK) 🏴󠁮󠁥󠀵󠁿 Flag for Tahoua (NE-5) 🏴󠁭󠁵󠁲󠁲󠁿 Flag for Rivière du Rempart (MU-RR) 🏴󠁮󠁧󠁩󠁭󠁿 Flag for Imo (NG-IM) 🏴󠁮󠁧󠁫󠁴󠁿 Flag for Katsina (NG-KT) 🏴󠁮󠁥󠀳󠁿 Flag for Dosso (NE-3) 🏴󠁮󠁥󠀶󠁿 Flag for Tillabéri (NE-6) 🏴󠁮󠁧󠁥󠁫󠁿 Flag for Ekiti (NG-EK) 🏴󠁮󠁡󠁯󠁨󠁿 Flag for Omaheke (NA-OH) 🏴󠁮󠁧󠁢󠁡󠁿 Flag for Bauchi (NG-BA) 🏴󠁮󠁡󠁫󠁡󠁿 Flag for Karas (NA-KA) 🏴󠁮󠁧󠁢󠁹󠁿 Flag for Bayelsa (NG-BY) 🏴󠁮󠁡󠁯󠁷󠁿 Flag for Ohangwena (NA-OW) 🏴󠁮󠁧󠁢󠁥󠁿 Flag for Benue (NG-BE) 🏴󠁮󠁧󠁥󠁮󠁿 Flag for Enugu (NG-EN) 🏴󠁮󠁡󠁯󠁮󠁿 Flag for Oshana (NA-ON) 🏴󠁮󠁧󠁫󠁤󠁿 Flag for Kaduna (NG-KD) 👨🏻‍👶🏻‍👦🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 🏴󠁮󠁧󠁫󠁥󠁿 Flag for Kebbi (NG-KE) 🏴󠁮󠁧󠁪󠁩󠁿 Flag for Jigawa (NG-JI) 🏴󠁮󠁥󠀸󠁿 Flag for Niamey (NE-8) 🏴󠁮󠁧󠁡󠁮󠁿 Flag for Anambra (NG-AN) 🏴󠁮󠁧󠁧󠁯󠁿 Flag for Gombe (NG-GO) 🏴󠁮󠁥󠀱󠁿 Flag for Agadez (NE-1) 🏴󠁮󠁡󠁫󠁨󠁿 Flag for Khomas (NA-KH) 🏴󠁮󠁥󠀲󠁿 Flag for Diffa (NE-2) 🏴󠁭󠁹󠀰󠀱󠁿 Flag for Johor (MY-01) 🏴󠁮󠁧󠁫󠁮󠁿 Flag for Kano (NG-KN) 🏴󠁮󠁡󠁯󠁳󠁿 Flag for Omusati (NA-OS) 🏴󠁮󠁧󠁫󠁯󠁿 Flag for Kogi (NG-KO) 🏴󠁮󠁧󠁥󠁤󠁿 Flag for Edo (NG-ED) 🏴󠁮󠁧󠁡󠁢󠁿 Flag for Abia (NG-AB) 🏴󠁮󠁡󠁯󠁴󠁿 Flag for Oshikoto (NA-OT) 🏴󠁮󠁡󠁫󠁷󠁿 Flag for Kavango West (NA-KW) 🏴󠁮󠁧󠁥󠁢󠁿 Flag for Ebonyi (NG-EB) 🏴󠁮󠁥󠀷󠁿 Flag for Zinder 
(NE-7) 🏴󠁮󠁩󠁪󠁩󠁿 Flag for Jinotega (NI-JI) 🏴󠁮󠁧󠁮󠁡󠁿 Flag for Nasarawa (NG-NA) 🏴󠁮󠁬󠁦󠁲󠁿 Flag for Friesland (NL-FR) 🏴󠁮󠁧󠁳󠁯󠁿 Flag for Sokoto (NG-SO) 🏴󠁮󠁩󠁲󠁩󠁿 Flag for Rivas (NI-RI) 🏴󠁮󠁩󠁮󠁳󠁿 Flag for Nueva Segovia (NI-NS) 🏴󠁮󠁧󠁰󠁬󠁿 Flag for Plateau (NG-PL) 🏴󠁮󠁧󠁹󠁯󠁿 Flag for Yobe (NG-YO) 🏴󠁮󠁬󠁢󠁱󠀱󠁿 Flag for Bonaire (NL-BQ1) 🏴󠁮󠁩󠁡󠁮󠁿 Flag for Atlántico Norte (NI-AN) 🏴󠁮󠁧󠁺󠁡󠁿 Flag for Zamfara (NG-ZA) 🏴󠁮󠁬󠁧󠁥󠁿 Flag for Gelderland (NL-GE) 🏴󠁮󠁧󠁯󠁹󠁿 Flag for Oyo (NG-OY) 🏴󠁮󠁩󠁭󠁤󠁿 Flag for Madriz (NI-MD) 🏴󠁮󠁩󠁣󠁩󠁿 Flag for Chinandega (NI-CI) 🏴󠁮󠁧󠁯󠁮󠁿 Flag for Ondo (NG-ON) 👨🏽‍👨🏽‍👦🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁤󠁥󠁮󠁷󠁿 Flag for North Rhine-Westphalia (DE-NW) 🏴󠁮󠁧󠁬󠁡󠁿 Flag for Lagos (NG-LA) 🏴󠁮󠁩󠁭󠁮󠁿 Flag for Managua (NI-MN) 🏴󠁮󠁩󠁡󠁳󠁿 Flag for Atlántico Sur (NI-AS) 🏴󠁮󠁬󠁣󠁷󠁿 Flag for Curaçao (NL-CW) 🏴󠁮󠁩󠁢󠁯󠁿 Flag for Boaco (NI-BO) 🏴󠁮󠁧󠁲󠁩󠁿 Flag for Rivers (NG-RI) 🏴󠁮󠁩󠁧󠁲󠁿 Flag for Granada (NI-GR) 🏴󠁮󠁩󠁣󠁯󠁿 Flag for Chontales (NI-CO) 🏴󠁮󠁬󠁧󠁲󠁿 Flag for Groningen (NL-GR) 🏴󠁮󠁬󠁢󠁱󠀳󠁿 Flag for Sint Eustatius (NL-BQ3) 🏴󠁮󠁩󠁳󠁪󠁿 Flag for Río San Juan (NI-SJ) 🏴󠁮󠁧󠁯󠁳󠁿 Flag for Osun (NG-OS) 🏴󠁮󠁧󠁴󠁡󠁿 Flag for Taraba (NG-TA) 🏴󠁮󠁬󠁦󠁬󠁿 Flag for Flevoland (NL-FL) 🏴󠁮󠁩󠁭󠁴󠁿 Flag for Matagalpa (NI-MT) 🏴󠁮󠁬󠁤󠁲󠁿 Flag for Drenthe (NL-DR) 🏴󠁮󠁩󠁣󠁡󠁿 Flag for Carazo (NI-CA) 🏴󠁮󠁧󠁫󠁷󠁿 Flag for Kwara (NG-KW) 🏴󠁮󠁧󠁮󠁩󠁿 Flag for Niger (NG-NI) 🏴󠁮󠁩󠁥󠁳󠁿 Flag for Estelí (NI-ES) 🏴󠁮󠁬󠁺󠁨󠁿 Flag for South Holland (NL-ZH) """ for line in emojis.splitlines(): words = line.split() char = words[0] desc = " ".join(words[1:]) print("{}\t:{}".format(desc, char))
#!/usr/bin/python3 # -*- coding: utf-8 -*- from subprocess import Popen, PIPE emojis="""⛑🏻 Helmet With White Cross, Type-1-2 ⛑🏼 Helmet With White Cross, Type-3 ⛑🏽 Helmet With White Cross, Type-4 ⛑🏾 Helmet With White Cross, Type-5 ⛑🏿 Helmet With White Cross, Type-6 💏🏻 Kiss, Type-1-2 💏🏼 Kiss, Type-3 💏🏽 Kiss, Type-4 💏🏾 Kiss, Type-5 💏🏿 Kiss, Type-6 💑🏻 Couple With Heart, Type-1-2 💑🏼 Couple With Heart, Type-3 💑🏽 Couple With Heart, Type-4 💑🏾 Couple With Heart, Type-5 💑🏿 Couple With Heart, Type-6 ⛷🏻 Skier, Type-1-2 ⛷🏼 Skier, Type-3 ⛷🏽 Skier, Type-4 ⛷🏾 Skier, Type-5 ⛷🏿 Skier, Type-6 😀 Grinning Face 😁 Grinning Face With Smiling Eyes 😂 Face With Tears of Joy 🤣 Rolling on the Floor Laughing 😃 Smiling Face With Open Mouth 😄 Smiling Face With Open Mouth & Smiling Eyes 😅 Smiling Face With Open Mouth & Cold Sweat 😆 Smiling Face With Open Mouth & Closed Eyes 😉 Winking Face 😊 Smiling Face With Smiling Eyes 😋 Face Savouring Delicious Food 😎 Smiling Face With Sunglasses 😍 Smiling Face With Heart-Eyes 😘 Face Blowing a Kiss 😗 Kissing Face 😙 Kissing Face With Smiling Eyes 😚 Kissing Face With Closed Eyes ☺ Smiling Face 🙂 Slightly Smiling Face 🤗 Hugging Face 🤩 Star-Struck 🤔 Thinking Face 🤨 Face With Raised Eyebrow 😐 Neutral Face 😑 Expressionless Face 😶 Face Without Mouth 🙄 Face With Rolling Eyes 😏 Smirking Face 😣 Persevering Face 😥 Disappointed but Relieved Face 😮 Face With Open Mouth 🤐 Zipper-Mouth Face 😯 Hushed Face 😪 Sleepy Face 😫 Tired Face 😴 Sleeping Face 😌 Relieved Face 😛 Face With Stuck-Out Tongue 😜 Face With Stuck-Out Tongue & Winking Eye 😝 Face With Stuck-Out Tongue & Closed Eyes 🤤 Drooling Face 😒 Unamused Face 😓 Face With Cold Sweat 😔 Pensive Face 😕 Confused Face 🙃 Upside-Down Face 🤑 Money-Mouth Face 😲 Astonished Face ☹ Frowning Face 🙁 Slightly Frowning Face 😖 Confounded Face 😞 Disappointed Face 😟 Worried Face 😤 Face With Steam From Nose 😢 Crying Face 😭 Loudly Crying Face 😦 Frowning Face With Open Mouth 😧 Anguished Face 😨 Fearful Face 😩 Weary Face 🤯 Exploding Head 😬 Grimacing 
Face 😰 Face With Open Mouth & Cold Sweat 😱 Face Screaming in Fear 😳 Flushed Face 🤪 Crazy Face 😵 Dizzy Face 😡 Pouting Face 😠 Angry Face 🤬 Face With Symbols Over Mouth 😷 Face With Medical Mask 🤒 Face With Thermometer 🤕 Face With Head-Bandage 🤢 Nauseated Face 🤮 Face Vomiting 🤧 Sneezing Face 😇 Smiling Face With Halo 🤠 Cowboy Hat Face 🤡 Clown Face 🤥 Lying Face 🤫 Shushing Face 🤭 Face With Hand Over Mouth 🧐 Face With Monocle 🤓 Nerd Face 😈 Smiling Face With Horns 👿 Angry Face With Horns 👹 Ogre 👺 Goblin 💀 Skull ☠ Skull and Crossbones 👻 Ghost 👽 Alien 👾 Alien Monster 🤖 Robot Face 💩 Pile of Poo 😺 Smiling Cat Face With Open Mouth 😸 Grinning Cat Face With Smiling Eyes 😹 Cat Face With Tears of Joy 😻 Smiling Cat Face With Heart-Eyes 😼 Cat Face With Wry Smile 😽 Kissing Cat Face With Closed Eyes 🙀 Weary Cat Face 😿 Crying Cat Face 😾 Pouting Cat Face 🙈 See-No-Evil Monkey 🙉 Hear-No-Evil Monkey 🙊 Speak-No-Evil Monkey 👶 Baby 👶🏻 Baby: Light Skin Tone 👶🏼 Baby: Medium-Light Skin Tone 👶🏽 Baby: Medium Skin Tone 👶🏾 Baby: Medium-Dark Skin Tone 👶🏿 Baby: Dark Skin Tone 🧒 Child 🧒🏻 Child: Light Skin Tone 🧒🏼 Child: Medium-Light Skin Tone 🧒🏽 Child: Medium Skin Tone 🧒🏾 Child: Medium-Dark Skin Tone 🧒🏿 Child: Dark Skin Tone 👦 Boy 👦🏻 Boy: Light Skin Tone 👦🏼 Boy: Medium-Light Skin Tone 👦🏽 Boy: Medium Skin Tone 👦🏾 Boy: Medium-Dark Skin Tone 👦🏿 Boy: Dark Skin Tone 👧 Girl 👧🏻 Girl: Light Skin Tone 👧🏼 Girl: Medium-Light Skin Tone 👧🏽 Girl: Medium Skin Tone 👧🏾 Girl: Medium-Dark Skin Tone 👧🏿 Girl: Dark Skin Tone 🧑 Adult 🧑🏻 Adult: Light Skin Tone 🧑🏼 Adult: Medium-Light Skin Tone 🧑🏽 Adult: Medium Skin Tone 🧑🏾 Adult: Medium-Dark Skin Tone 🧑🏿 Adult: Dark Skin Tone 👨 Man 👨🏻 Man: Light Skin Tone 👨🏼 Man: Medium-Light Skin Tone 👨🏽 Man: Medium Skin Tone 👨🏾 Man: Medium-Dark Skin Tone 👨🏿 Man: Dark Skin Tone 👩 Woman 👩🏻 Woman: Light Skin Tone 👩🏼 Woman: Medium-Light Skin Tone 👩🏽 Woman: Medium Skin Tone 👩🏾 Woman: Medium-Dark Skin Tone 👩🏿 Woman: Dark Skin Tone 🧓 Older Adult 🧓🏻 Older Adult: Light Skin Tone 🧓🏼 Older Adult: 
Medium-Light Skin Tone 🧓🏽 Older Adult: Medium Skin Tone 🧓🏾 Older Adult: Medium-Dark Skin Tone 🧓🏿 Older Adult: Dark Skin Tone 👴 Old Man 👴🏻 Old Man: Light Skin Tone 👴🏼 Old Man: Medium-Light Skin Tone 👴🏽 Old Man: Medium Skin Tone 👴🏾 Old Man: Medium-Dark Skin Tone 👴🏿 Old Man: Dark Skin Tone 👵 Old Woman 👵🏻 Old Woman: Light Skin Tone 👵🏼 Old Woman: Medium-Light Skin Tone 👵🏽 Old Woman: Medium Skin Tone 👵🏾 Old Woman: Medium-Dark Skin Tone 👵🏿 Old Woman: Dark Skin Tone 👨‍⚕️ Man Health Worker 👨🏻‍⚕️ Man Health Worker: Light Skin Tone 👨🏼‍⚕️ Man Health Worker: Medium-Light Skin Tone 👨🏽‍⚕️ Man Health Worker: Medium Skin Tone 👨🏾‍⚕️ Man Health Worker: Medium-Dark Skin Tone 👨🏿‍⚕️ Man Health Worker: Dark Skin Tone 👩‍⚕️ Woman Health Worker 👩🏻‍⚕️ Woman Health Worker: Light Skin Tone 👩🏼‍⚕️ Woman Health Worker: Medium-Light Skin Tone 👩🏽‍⚕️ Woman Health Worker: Medium Skin Tone 👩🏾‍⚕️ Woman Health Worker: Medium-Dark Skin Tone 👩🏿‍⚕️ Woman Health Worker: Dark Skin Tone 👨‍🎓 Man Student 👨🏻‍🎓 Man Student: Light Skin Tone 👨🏼‍🎓 Man Student: Medium-Light Skin Tone 👨🏽‍🎓 Man Student: Medium Skin Tone 👨🏾‍🎓 Man Student: Medium-Dark Skin Tone 👨🏿‍🎓 Man Student: Dark Skin Tone 👩‍🎓 Woman Student 👩🏻‍🎓 Woman Student: Light Skin Tone 👩🏼‍🎓 Woman Student: Medium-Light Skin Tone 👩🏽‍🎓 Woman Student: Medium Skin Tone 👩🏾‍🎓 Woman Student: Medium-Dark Skin Tone 👩🏿‍🎓 Woman Student: Dark Skin Tone 👨‍🏫 Man Teacher 👨🏻‍🏫 Man Teacher: Light Skin Tone 👨🏼‍🏫 Man Teacher: Medium-Light Skin Tone 👨🏽‍🏫 Man Teacher: Medium Skin Tone 👨🏾‍🏫 Man Teacher: Medium-Dark Skin Tone 👨🏿‍🏫 Man Teacher: Dark Skin Tone 👩‍🏫 Woman Teacher 👩🏻‍🏫 Woman Teacher: Light Skin Tone 👩🏼‍🏫 Woman Teacher: Medium-Light Skin Tone 👩🏽‍🏫 Woman Teacher: Medium Skin Tone 👩🏾‍🏫 Woman Teacher: Medium-Dark Skin Tone 👩🏿‍🏫 Woman Teacher: Dark Skin Tone 👨‍⚖️ Man Judge 👨🏻‍⚖️ Man Judge: Light Skin Tone 👨🏼‍⚖️ Man Judge: Medium-Light Skin Tone 👨🏽‍⚖️ Man Judge: Medium Skin Tone 👨🏾‍⚖️ Man Judge: Medium-Dark Skin Tone 👨🏿‍⚖️ Man Judge: Dark Skin Tone 👩‍⚖️ Woman Judge 👩🏻‍⚖️ Woman 
Judge: Light Skin Tone 👩🏼‍⚖️ Woman Judge: Medium-Light Skin Tone 👩🏽‍⚖️ Woman Judge: Medium Skin Tone 👩🏾‍⚖️ Woman Judge: Medium-Dark Skin Tone 👩🏿‍⚖️ Woman Judge: Dark Skin Tone 👨‍🌾 Man Farmer 👨🏻‍🌾 Man Farmer: Light Skin Tone 👨🏼‍🌾 Man Farmer: Medium-Light Skin Tone 👨🏽‍🌾 Man Farmer: Medium Skin Tone 👨🏾‍🌾 Man Farmer: Medium-Dark Skin Tone 👨🏿‍🌾 Man Farmer: Dark Skin Tone 👩‍🌾 Woman Farmer 👩🏻‍🌾 Woman Farmer: Light Skin Tone 👩🏼‍🌾 Woman Farmer: Medium-Light Skin Tone 👩🏽‍🌾 Woman Farmer: Medium Skin Tone 👩🏾‍🌾 Woman Farmer: Medium-Dark Skin Tone 👩🏿‍🌾 Woman Farmer: Dark Skin Tone 👨‍🍳 Man Cook 👨🏻‍🍳 Man Cook: Light Skin Tone 👨🏼‍🍳 Man Cook: Medium-Light Skin Tone 👨🏽‍🍳 Man Cook: Medium Skin Tone 👨🏾‍🍳 Man Cook: Medium-Dark Skin Tone 👨🏿‍🍳 Man Cook: Dark Skin Tone 👩‍🍳 Woman Cook 👩🏻‍🍳 Woman Cook: Light Skin Tone 👩🏼‍🍳 Woman Cook: Medium-Light Skin Tone 👩🏽‍🍳 Woman Cook: Medium Skin Tone 👩🏾‍🍳 Woman Cook: Medium-Dark Skin Tone 👩🏿‍🍳 Woman Cook: Dark Skin Tone 👨‍🔧 Man Mechanic 👨🏻‍🔧 Man Mechanic: Light Skin Tone 👨🏼‍🔧 Man Mechanic: Medium-Light Skin Tone 👨🏽‍🔧 Man Mechanic: Medium Skin Tone 👨🏾‍🔧 Man Mechanic: Medium-Dark Skin Tone 👨🏿‍🔧 Man Mechanic: Dark Skin Tone 👩‍🔧 Woman Mechanic 👩🏻‍🔧 Woman Mechanic: Light Skin Tone 👩🏼‍🔧 Woman Mechanic: Medium-Light Skin Tone 👩🏽‍🔧 Woman Mechanic: Medium Skin Tone 👩🏾‍🔧 Woman Mechanic: Medium-Dark Skin Tone 👩🏿‍🔧 Woman Mechanic: Dark Skin Tone 👨‍🏭 Man Factory Worker 👨🏻‍🏭 Man Factory Worker: Light Skin Tone 👨🏼‍🏭 Man Factory Worker: Medium-Light Skin Tone 👨🏽‍🏭 Man Factory Worker: Medium Skin Tone 👨🏾‍🏭 Man Factory Worker: Medium-Dark Skin Tone 👨🏿‍🏭 Man Factory Worker: Dark Skin Tone 👩‍🏭 Woman Factory Worker 👩🏻‍🏭 Woman Factory Worker: Light Skin Tone 👩🏼‍🏭 Woman Factory Worker: Medium-Light Skin Tone 👩🏽‍🏭 Woman Factory Worker: Medium Skin Tone 👩🏾‍🏭 Woman Factory Worker: Medium-Dark Skin Tone 👩🏿‍🏭 Woman Factory Worker: Dark Skin Tone 👨‍💼 Man Office Worker 👨🏻‍💼 Man Office Worker: Light Skin Tone 👨🏼‍💼 Man Office Worker: Medium-Light Skin Tone 👨🏽‍💼 Man Office Worker: 
Medium Skin Tone 👨🏾‍💼 Man Office Worker: Medium-Dark Skin Tone 👨🏿‍💼 Man Office Worker: Dark Skin Tone 👩‍💼 Woman Office Worker 👩🏻‍💼 Woman Office Worker: Light Skin Tone 👩🏼‍💼 Woman Office Worker: Medium-Light Skin Tone 👩🏽‍💼 Woman Office Worker: Medium Skin Tone 👩🏾‍💼 Woman Office Worker: Medium-Dark Skin Tone 👩🏿‍💼 Woman Office Worker: Dark Skin Tone 👨‍🔬 Man Scientist 👨🏻‍🔬 Man Scientist: Light Skin Tone 👨🏼‍🔬 Man Scientist: Medium-Light Skin Tone 👨🏽‍🔬 Man Scientist: Medium Skin Tone 👨🏾‍🔬 Man Scientist: Medium-Dark Skin Tone 👨🏿‍🔬 Man Scientist: Dark Skin Tone 👩‍🔬 Woman Scientist 👩🏻‍🔬 Woman Scientist: Light Skin Tone 👩🏼‍🔬 Woman Scientist: Medium-Light Skin Tone 👩🏽‍🔬 Woman Scientist: Medium Skin Tone 👩🏾‍🔬 Woman Scientist: Medium-Dark Skin Tone 👩🏿‍🔬 Woman Scientist: Dark Skin Tone 👨‍💻 Man Technologist 👨🏻‍💻 Man Technologist: Light Skin Tone 👨🏼‍💻 Man Technologist: Medium-Light Skin Tone 👨🏽‍💻 Man Technologist: Medium Skin Tone 👨🏾‍💻 Man Technologist: Medium-Dark Skin Tone 👨🏿‍💻 Man Technologist: Dark Skin Tone 👩‍💻 Woman Technologist 👩🏻‍💻 Woman Technologist: Light Skin Tone 👩🏼‍💻 Woman Technologist: Medium-Light Skin Tone 👩🏽‍💻 Woman Technologist: Medium Skin Tone 👩🏾‍💻 Woman Technologist: Medium-Dark Skin Tone 👩🏿‍💻 Woman Technologist: Dark Skin Tone 👨‍🎤 Man Singer 👨🏻‍🎤 Man Singer: Light Skin Tone 👨🏼‍🎤 Man Singer: Medium-Light Skin Tone 👨🏽‍🎤 Man Singer: Medium Skin Tone 👨🏾‍🎤 Man Singer: Medium-Dark Skin Tone 👨🏿‍🎤 Man Singer: Dark Skin Tone 👩‍🎤 Woman Singer 👩🏻‍🎤 Woman Singer: Light Skin Tone 👩🏼‍🎤 Woman Singer: Medium-Light Skin Tone 👩🏽‍🎤 Woman Singer: Medium Skin Tone 👩🏾‍🎤 Woman Singer: Medium-Dark Skin Tone 👩🏿‍🎤 Woman Singer: Dark Skin Tone 👨‍🎨 Man Artist 👨🏻‍🎨 Man Artist: Light Skin Tone 👨🏼‍🎨 Man Artist: Medium-Light Skin Tone 👨🏽‍🎨 Man Artist: Medium Skin Tone 👨🏾‍🎨 Man Artist: Medium-Dark Skin Tone 👨🏿‍🎨 Man Artist: Dark Skin Tone 👩‍🎨 Woman Artist 👩🏻‍🎨 Woman Artist: Light Skin Tone 👩🏼‍🎨 Woman Artist: Medium-Light Skin Tone 👩🏽‍🎨 Woman Artist: Medium Skin Tone 👩🏾‍🎨 Woman Artist: 
Medium-Dark Skin Tone 👩🏿‍🎨 Woman Artist: Dark Skin Tone 👨‍✈️ Man Pilot 👨🏻‍✈️ Man Pilot: Light Skin Tone 👨🏼‍✈️ Man Pilot: Medium-Light Skin Tone 👨🏽‍✈️ Man Pilot: Medium Skin Tone 👨🏾‍✈️ Man Pilot: Medium-Dark Skin Tone 👨🏿‍✈️ Man Pilot: Dark Skin Tone 👩‍✈️ Woman Pilot 👩🏻‍✈️ Woman Pilot: Light Skin Tone 👩🏼‍✈️ Woman Pilot: Medium-Light Skin Tone 👩🏽‍✈️ Woman Pilot: Medium Skin Tone 👩🏾‍✈️ Woman Pilot: Medium-Dark Skin Tone 👩🏿‍✈️ Woman Pilot: Dark Skin Tone 👨‍🚀 Man Astronaut 👨🏻‍🚀 Man Astronaut: Light Skin Tone 👨🏼‍🚀 Man Astronaut: Medium-Light Skin Tone 👨🏽‍🚀 Man Astronaut: Medium Skin Tone 👨🏾‍🚀 Man Astronaut: Medium-Dark Skin Tone 👨🏿‍🚀 Man Astronaut: Dark Skin Tone 👩‍🚀 Woman Astronaut 👩🏻‍🚀 Woman Astronaut: Light Skin Tone 👩🏼‍🚀 Woman Astronaut: Medium-Light Skin Tone 👩🏽‍🚀 Woman Astronaut: Medium Skin Tone 👩🏾‍🚀 Woman Astronaut: Medium-Dark Skin Tone 👩🏿‍🚀 Woman Astronaut: Dark Skin Tone 👨‍🚒 Man Firefighter 👨🏻‍🚒 Man Firefighter: Light Skin Tone 👨🏼‍🚒 Man Firefighter: Medium-Light Skin Tone 👨🏽‍🚒 Man Firefighter: Medium Skin Tone 👨🏾‍🚒 Man Firefighter: Medium-Dark Skin Tone 👨🏿‍🚒 Man Firefighter: Dark Skin Tone 👩‍🚒 Woman Firefighter 👩🏻‍🚒 Woman Firefighter: Light Skin Tone 👩🏼‍🚒 Woman Firefighter: Medium-Light Skin Tone 👩🏽‍🚒 Woman Firefighter: Medium Skin Tone 👩🏾‍🚒 Woman Firefighter: Medium-Dark Skin Tone 👩🏿‍🚒 Woman Firefighter: Dark Skin Tone 👮 Police Officer 👮🏻 Police Officer: Light Skin Tone 👮🏼 Police Officer: Medium-Light Skin Tone 👮🏽 Police Officer: Medium Skin Tone 👮🏾 Police Officer: Medium-Dark Skin Tone 👮🏿 Police Officer: Dark Skin Tone 👮‍♂️ Man Police Officer 👮🏻‍♂️ Man Police Officer: Light Skin Tone 👮🏼‍♂️ Man Police Officer: Medium-Light Skin Tone 👮🏽‍♂️ Man Police Officer: Medium Skin Tone 👮🏾‍♂️ Man Police Officer: Medium-Dark Skin Tone 👮🏿‍♂️ Man Police Officer: Dark Skin Tone 👮‍♀️ Woman Police Officer 👮🏻‍♀️ Woman Police Officer: Light Skin Tone 👮🏼‍♀️ Woman Police Officer: Medium-Light Skin Tone 👮🏽‍♀️ Woman Police Officer: Medium Skin Tone 👮🏾‍♀️ Woman Police Officer: 
Medium-Dark Skin Tone 👮🏿‍♀️ Woman Police Officer: Dark Skin Tone 🕵 Detective 🕵🏻 Detective: Light Skin Tone 🕵🏼 Detective: Medium-Light Skin Tone 🕵🏽 Detective: Medium Skin Tone 🕵🏾 Detective: Medium-Dark Skin Tone 🕵🏿 Detective: Dark Skin Tone 🕵️‍♂️ Man Detective 🕵🏻‍♂️ Man Detective: Light Skin Tone 🕵🏼‍♂️ Man Detective: Medium-Light Skin Tone 🕵🏽‍♂️ Man Detective: Medium Skin Tone 🕵🏾‍♂️ Man Detective: Medium-Dark Skin Tone 🕵🏿‍♂️ Man Detective: Dark Skin Tone 🕵️‍♀️ Woman Detective 🕵🏻‍♀️ Woman Detective: Light Skin Tone 🕵🏼‍♀️ Woman Detective: Medium-Light Skin Tone 🕵🏽‍♀️ Woman Detective: Medium Skin Tone 🕵🏾‍♀️ Woman Detective: Medium-Dark Skin Tone 🕵🏿‍♀️ Woman Detective: Dark Skin Tone 💂 Guard 💂🏻 Guard: Light Skin Tone 💂🏼 Guard: Medium-Light Skin Tone 💂🏽 Guard: Medium Skin Tone 💂🏾 Guard: Medium-Dark Skin Tone 💂🏿 Guard: Dark Skin Tone 💂‍♂️ Man Guard 💂🏻‍♂️ Man Guard: Light Skin Tone 💂🏼‍♂️ Man Guard: Medium-Light Skin Tone 💂🏽‍♂️ Man Guard: Medium Skin Tone 💂🏾‍♂️ Man Guard: Medium-Dark Skin Tone 💂🏿‍♂️ Man Guard: Dark Skin Tone 💂‍♀️ Woman Guard 💂🏻‍♀️ Woman Guard: Light Skin Tone 💂🏼‍♀️ Woman Guard: Medium-Light Skin Tone 💂🏽‍♀️ Woman Guard: Medium Skin Tone 💂🏾‍♀️ Woman Guard: Medium-Dark Skin Tone 💂🏿‍♀️ Woman Guard: Dark Skin Tone 👷 Construction Worker 👷🏻 Construction Worker: Light Skin Tone 👷🏼 Construction Worker: Medium-Light Skin Tone 👷🏽 Construction Worker: Medium Skin Tone 👷🏾 Construction Worker: Medium-Dark Skin Tone 👷🏿 Construction Worker: Dark Skin Tone 👷‍♂️ Man Construction Worker 👷🏻‍♂️ Man Construction Worker: Light Skin Tone 👷🏼‍♂️ Man Construction Worker: Medium-Light Skin Tone 👷🏽‍♂️ Man Construction Worker: Medium Skin Tone 👷🏾‍♂️ Man Construction Worker: Medium-Dark Skin Tone 👷🏿‍♂️ Man Construction Worker: Dark Skin Tone 👷‍♀️ Woman Construction Worker 👷🏻‍♀️ Woman Construction Worker: Light Skin Tone 👷🏼‍♀️ Woman Construction Worker: Medium-Light Skin Tone 👷🏽‍♀️ Woman Construction Worker: Medium Skin Tone 👷🏾‍♀️ Woman Construction Worker: Medium-Dark Skin Tone 👷🏿‍♀️ 
Woman Construction Worker: Dark Skin Tone 🤴 Prince 🤴🏻 Prince: Light Skin Tone 🤴🏼 Prince: Medium-Light Skin Tone 🤴🏽 Prince: Medium Skin Tone 🤴🏾 Prince: Medium-Dark Skin Tone 🤴🏿 Prince: Dark Skin Tone 👸 Princess 👸🏻 Princess: Light Skin Tone 👸🏼 Princess: Medium-Light Skin Tone 👸🏽 Princess: Medium Skin Tone 👸🏾 Princess: Medium-Dark Skin Tone 👸🏿 Princess: Dark Skin Tone 👳 Person Wearing Turban 👳🏻 Person Wearing Turban: Light Skin Tone 👳🏼 Person Wearing Turban: Medium-Light Skin Tone 👳🏽 Person Wearing Turban: Medium Skin Tone 👳🏾 Person Wearing Turban: Medium-Dark Skin Tone 👳🏿 Person Wearing Turban: Dark Skin Tone 👳‍♂️ Man Wearing Turban 👳🏻‍♂️ Man Wearing Turban: Light Skin Tone 👳🏼‍♂️ Man Wearing Turban: Medium-Light Skin Tone 👳🏽‍♂️ Man Wearing Turban: Medium Skin Tone 👳🏾‍♂️ Man Wearing Turban: Medium-Dark Skin Tone 👳🏿‍♂️ Man Wearing Turban: Dark Skin Tone 👳‍♀️ Woman Wearing Turban 👳🏻‍♀️ Woman Wearing Turban: Light Skin Tone 👳🏼‍♀️ Woman Wearing Turban: Medium-Light Skin Tone 👳🏽‍♀️ Woman Wearing Turban: Medium Skin Tone 👳🏾‍♀️ Woman Wearing Turban: Medium-Dark Skin Tone 👳🏿‍♀️ Woman Wearing Turban: Dark Skin Tone 👲 Man With Chinese Cap 👲🏻 Man With Chinese Cap: Light Skin Tone 👲🏼 Man With Chinese Cap: Medium-Light Skin Tone 👲🏽 Man With Chinese Cap: Medium Skin Tone 👲🏾 Man With Chinese Cap: Medium-Dark Skin Tone 👲🏿 Man With Chinese Cap: Dark Skin Tone 🧕 Woman With Headscarf 🧕🏻 Person With Headscarf: Light Skin Tone 🧕🏼 Person With Headscarf: Medium-Light Skin Tone 🧕🏽 Person With Headscarf: Medium Skin Tone 🧕🏾 Person With Headscarf: Medium-Dark Skin Tone 🧕🏿 Person With Headscarf: Dark Skin Tone 🧔 Bearded Person 🧔🏻 Bearded Person: Light Skin Tone 🧔🏼 Bearded Person: Medium-Light Skin Tone 🧔🏽 Bearded Person: Medium Skin Tone 🧔🏾 Bearded Person: Medium-Dark Skin Tone 🧔🏿 Bearded Person: Dark Skin Tone 👱 Blond-Haired Person 👱🏻 Blond-Haired Person: Light Skin Tone 👱🏼 Blond-Haired Person: Medium-Light Skin Tone 👱🏽 Blond-Haired Person: Medium Skin Tone 👱🏾 Blond-Haired Person: Medium-Dark 
Skin Tone 👱🏿 Blond-Haired Person: Dark Skin Tone 👱‍♂️ Blond-Haired Man 👱🏻‍♂️ Blond-Haired Man: Light Skin Tone 👱🏼‍♂️ Blond-Haired Man: Medium-Light Skin Tone 👱🏽‍♂️ Blond-Haired Man: Medium Skin Tone 👱🏾‍♂️ Blond-Haired Man: Medium-Dark Skin Tone 👱🏿‍♂️ Blond-Haired Man: Dark Skin Tone 👱‍♀️ Blond-Haired Woman 👱🏻‍♀️ Blond-Haired Woman: Light Skin Tone 👱🏼‍♀️ Blond-Haired Woman: Medium-Light Skin Tone 👱🏽‍♀️ Blond-Haired Woman: Medium Skin Tone 👱🏾‍♀️ Blond-Haired Woman: Medium-Dark Skin Tone 👱🏿‍♀️ Blond-Haired Woman: Dark Skin Tone 🤵 Man in Tuxedo 🤵🏻 Man in Tuxedo: Light Skin Tone 🤵🏼 Man in Tuxedo: Medium-Light Skin Tone 🤵🏽 Man in Tuxedo: Medium Skin Tone 🤵🏾 Man in Tuxedo: Medium-Dark Skin Tone 🤵🏿 Man in Tuxedo: Dark Skin Tone 👰 Bride With Veil 👰🏻 Bride With Veil: Light Skin Tone 👰🏼 Bride With Veil: Medium-Light Skin Tone 👰🏽 Bride With Veil: Medium Skin Tone 👰🏾 Bride With Veil: Medium-Dark Skin Tone 👰🏿 Bride With Veil: Dark Skin Tone 🤰 Pregnant Woman 🤰🏻 Pregnant Woman: Light Skin Tone 🤰🏼 Pregnant Woman: Medium-Light Skin Tone 🤰🏽 Pregnant Woman: Medium Skin Tone 🤰🏾 Pregnant Woman: Medium-Dark Skin Tone 🤰🏿 Pregnant Woman: Dark Skin Tone 🤱 Breast-Feeding 🤱🏻 Breast-Feeding: Light Skin Tone 🤱🏼 Breast-Feeding: Medium-Light Skin Tone 🤱🏽 Breast-Feeding: Medium Skin Tone 🤱🏾 Breast-Feeding: Medium-Dark Skin Tone 🤱🏿 Breast-Feeding: Dark Skin Tone 👼 Baby Angel 👼🏻 Baby Angel: Light Skin Tone 👼🏼 Baby Angel: Medium-Light Skin Tone 👼🏽 Baby Angel: Medium Skin Tone 👼🏾 Baby Angel: Medium-Dark Skin Tone 👼🏿 Baby Angel: Dark Skin Tone 🎅 Santa Claus 🎅🏻 Santa Claus: Light Skin Tone 🎅🏼 Santa Claus: Medium-Light Skin Tone 🎅🏽 Santa Claus: Medium Skin Tone 🎅🏾 Santa Claus: Medium-Dark Skin Tone 🎅🏿 Santa Claus: Dark Skin Tone 🤶 Mrs. Claus 🤶🏻 Mrs. Claus: Light Skin Tone 🤶🏼 Mrs. Claus: Medium-Light Skin Tone 🤶🏽 Mrs. Claus: Medium Skin Tone 🤶🏾 Mrs. Claus: Medium-Dark Skin Tone 🤶🏿 Mrs. 
Claus: Dark Skin Tone 🧙 Mage 🧙🏻 Mage: Light Skin Tone 🧙🏼 Mage: Medium-Light Skin Tone 🧙🏽 Mage: Medium Skin Tone 🧙🏾 Mage: Medium-Dark Skin Tone 🧙🏿 Mage: Dark Skin Tone 🧙‍♀️ Woman Mage 🧙🏻‍♀️ Woman Mage: Light Skin Tone 🧙🏼‍♀️ Woman Mage: Medium-Light Skin Tone 🧙🏽‍♀️ Woman Mage: Medium Skin Tone 🧙🏾‍♀️ Woman Mage: Medium-Dark Skin Tone 🧙🏿‍♀️ Woman Mage: Dark Skin Tone 🧙‍♂️ Man Mage 🧙🏻‍♂️ Man Mage: Light Skin Tone 🧙🏼‍♂️ Man Mage: Medium-Light Skin Tone 🧙🏽‍♂️ Man Mage: Medium Skin Tone 🧙🏾‍♂️ Man Mage: Medium-Dark Skin Tone 🧙🏿‍♂️ Man Mage: Dark Skin Tone 🧚 Fairy 🧚🏻 Fairy: Light Skin Tone 🧚🏼 Fairy: Medium-Light Skin Tone 🧚🏽 Fairy: Medium Skin Tone 🧚🏾 Fairy: Medium-Dark Skin Tone 🧚🏿 Fairy: Dark Skin Tone 🧚‍♀️ Woman Fairy 🧚🏻‍♀️ Woman Fairy: Light Skin Tone 🧚🏼‍♀️ Woman Fairy: Medium-Light Skin Tone 🧚🏽‍♀️ Woman Fairy: Medium Skin Tone 🧚🏾‍♀️ Woman Fairy: Medium-Dark Skin Tone 🧚🏿‍♀️ Woman Fairy: Dark Skin Tone 🧚‍♂️ Man Fairy 🧚🏻‍♂️ Man Fairy: Light Skin Tone 🧚🏼‍♂️ Man Fairy: Medium-Light Skin Tone 🧚🏽‍♂️ Man Fairy: Medium Skin Tone 🧚🏾‍♂️ Man Fairy: Medium-Dark Skin Tone 🧚🏿‍♂️ Man Fairy: Dark Skin Tone 🧛 Vampire 🧛🏻 Vampire: Light Skin Tone 🧛🏼 Vampire: Medium-Light Skin Tone 🧛🏽 Vampire: Medium Skin Tone 🧛🏾 Vampire: Medium-Dark Skin Tone 🧛🏿 Vampire: Dark Skin Tone 🧛‍♀️ Woman Vampire 🧛🏻‍♀️ Woman Vampire: Light Skin Tone 🧛🏼‍♀️ Woman Vampire: Medium-Light Skin Tone 🧛🏽‍♀️ Woman Vampire: Medium Skin Tone 🧛🏾‍♀️ Woman Vampire: Medium-Dark Skin Tone 🧛🏿‍♀️ Woman Vampire: Dark Skin Tone 🧛‍♂️ Man Vampire 🧛🏻‍♂️ Man Vampire: Light Skin Tone 🧛🏼‍♂️ Man Vampire: Medium-Light Skin Tone 🧛🏽‍♂️ Man Vampire: Medium Skin Tone 🧛🏾‍♂️ Man Vampire: Medium-Dark Skin Tone 👯🏻 Woman With Bunny Ears, Type-1-2 👯🏼 Woman With Bunny Ears, Type-3 🧛🏿‍♂️ Man Vampire: Dark Skin Tone 👯🏽 Woman With Bunny Ears, Type-4 👯🏾 Woman With Bunny Ears, Type-5 🧜 Merperson 👯🏿 Woman With Bunny Ears, Type-6 🧜🏻 Merperson: Light Skin Tone 👯🏻‍♂️ Men With Bunny Ears Partying, Type-1-2 🧜🏼 Merperson: Medium-Light Skin Tone 👯🏼‍♂️ Men With Bunny 
Ears Partying, Type-3 🧜🏽 Merperson: Medium Skin Tone 👯🏽‍♂️ Men With Bunny Ears Partying, Type-4 🧜🏾 Merperson: Medium-Dark Skin Tone 👯🏾‍♂️ Men With Bunny Ears Partying, Type-5 🧜🏿 Merperson: Dark Skin Tone 👯🏿‍♂️ Men With Bunny Ears Partying, Type-6 🧜‍♀️ Mermaid 👯🏻‍♀️ Women With Bunny Ears Partying, Type-1-2 🧜🏻‍♀️ Mermaid: Light Skin Tone 👯🏼‍♀️ Women With Bunny Ears Partying, Type-3 🧜🏼‍♀️ Mermaid: Medium-Light Skin Tone 👯🏽‍♀️ Women With Bunny Ears Partying, Type-4 👯🏾‍♀️ Women With Bunny Ears Partying, Type-5 🧜🏽‍♀️ Mermaid: Medium Skin Tone 👯🏿‍♀️ Women With Bunny Ears Partying, Type-6 🧜🏾‍♀️ Mermaid: Medium-Dark Skin Tone 🧜🏿‍♀️ Mermaid: Dark Skin Tone 🧜‍♂️ Merman 🧜🏻‍♂️ Merman: Light Skin Tone 🧜🏼‍♂️ Merman: Medium-Light Skin Tone 👫🏻 Man and Woman Holding Hands, Type-1-2 🧜🏽‍♂️ Merman: Medium Skin Tone 👫🏼 Man and Woman Holding Hands, Type-3 👫🏽 Man and Woman Holding Hands, Type-4 🧜🏾‍♂️ Merman: Medium-Dark Skin Tone 👫🏾 Man and Woman Holding Hands, Type-5 👫🏿 Man and Woman Holding Hands, Type-6 🧜🏿‍♂️ Merman: Dark Skin Tone 👬🏻 Two Men Holding Hands, Type-1-2 🧝 Elf 👬🏼 Two Men Holding Hands, Type-3 👬🏽 Two Men Holding Hands, Type-4 🧝🏻 Elf: Light Skin Tone 👬🏾 Two Men Holding Hands, Type-5 🧝🏼 Elf: Medium-Light Skin Tone 👬🏿 Two Men Holding Hands, Type-6 🧝🏽 Elf: Medium Skin Tone 🧝🏾 Elf: Medium-Dark Skin Tone 👭🏻 Two Women Holding Hands, Type-1-2 🧝🏿 Elf: Dark Skin Tone 🧝‍♀️ Woman Elf 👭🏼 Two Women Holding Hands, Type-3 👭🏽 Two Women Holding Hands, Type-4 🧝🏻‍♀️ Woman Elf: Light Skin Tone 👭🏾 Two Women Holding Hands, Type-5 👭🏿 Two Women Holding Hands, Type-6 🧝🏼‍♀️ Woman Elf: Medium-Light Skin Tone 🧝🏽‍♀️ Woman Elf: Medium Skin Tone 🧝🏾‍♀️ Woman Elf: Medium-Dark Skin Tone 🧝🏿‍♀️ Woman Elf: Dark Skin Tone 🧝‍♂️ Man Elf 👪🏻 Family, Type-1-2 🧝🏻‍♂️ Man Elf: Light Skin Tone 👪🏼 Family, Type-3 👪🏽 Family, Type-4 🧝🏼‍♂️ Man Elf: Medium-Light Skin Tone 👪🏾 Family, Type-5 👪🏿 Family, Type-6 🧝🏽‍♂️ Man Elf: Medium Skin Tone 🧝🏾‍♂️ Man Elf: Medium-Dark Skin Tone 🧝🏿‍♂️ Man Elf: Dark Skin Tone 🧞 Genie 🧞‍♀️ Woman 
Genie 🧞‍♂️ Man Genie 🧟 Zombie 🧟‍♀️ Woman Zombie 🧟‍♂️ Man Zombie 🙍 Person Frowning 🙍🏻 Person Frowning: Light Skin Tone 🙍🏼 Person Frowning: Medium-Light Skin Tone 🙍🏽 Person Frowning: Medium Skin Tone 🙍🏾 Person Frowning: Medium-Dark Skin Tone 🙍🏿 Person Frowning: Dark Skin Tone 🙍‍♂️ Man Frowning 🙍🏻‍♂️ Man Frowning: Light Skin Tone 🏻 Light Skin Tone 🏼 Medium-Light Skin Tone 🙍🏼‍♂️ Man Frowning: Medium-Light Skin Tone 🏽 Medium Skin Tone 🙍🏽‍♂️ Man Frowning: Medium Skin Tone 🏾 Medium-Dark Skin Tone 🏿 Dark Skin Tone 🙍🏾‍♂️ Man Frowning: Medium-Dark Skin Tone 🙍🏿‍♂️ Man Frowning: Dark Skin Tone 🙍‍♀️ Woman Frowning 🙍🏻‍♀️ Woman Frowning: Light Skin Tone 🙍🏼‍♀️ Woman Frowning: Medium-Light Skin Tone 🙍🏽‍♀️ Woman Frowning: Medium Skin Tone 🙍🏾‍♀️ Woman Frowning: Medium-Dark Skin Tone 🙍🏿‍♀️ Woman Frowning: Dark Skin Tone 🙎 Person Pouting 🙎🏻 Person Pouting: Light Skin Tone 🙎🏼 Person Pouting: Medium-Light Skin Tone 🙎🏽 Person Pouting: Medium Skin Tone 🙎🏾 Person Pouting: Medium-Dark Skin Tone 🙎🏿 Person Pouting: Dark Skin Tone 🙎‍♂️ Man Pouting 🙎🏻‍♂️ Man Pouting: Light Skin Tone 🙎🏼‍♂️ Man Pouting: Medium-Light Skin Tone 🙎🏽‍♂️ Man Pouting: Medium Skin Tone 🙎🏾‍♂️ Man Pouting: Medium-Dark Skin Tone 🙎🏿‍♂️ Man Pouting: Dark Skin Tone 🙎‍♀️ Woman Pouting 🙎🏻‍♀️ Woman Pouting: Light Skin Tone 🙎🏼‍♀️ Woman Pouting: Medium-Light Skin Tone 🙎🏽‍♀️ Woman Pouting: Medium Skin Tone 🙎🏾‍♀️ Woman Pouting: Medium-Dark Skin Tone 🙎🏿‍♀️ Woman Pouting: Dark Skin Tone 🙅 Person Gesturing No 🙅🏻 Person Gesturing No: Light Skin Tone 🙅🏼 Person Gesturing No: Medium-Light Skin Tone 🙅🏽 Person Gesturing No: Medium Skin Tone 🙅🏾 Person Gesturing No: Medium-Dark Skin Tone 🙅🏿 Person Gesturing No: Dark Skin Tone 🙅‍♂️ Man Gesturing No 🙅🏻‍♂️ Man Gesturing No: Light Skin Tone 🙅🏼‍♂️ Man Gesturing No: Medium-Light Skin Tone 🙅🏽‍♂️ Man Gesturing No: Medium Skin Tone 🙅🏾‍♂️ Man Gesturing No: Medium-Dark Skin Tone 🙅🏿‍♂️ Man Gesturing No: Dark Skin Tone 🙅‍♀️ Woman Gesturing No 🙅🏻‍♀️ Woman Gesturing No: Light Skin Tone 🙅🏼‍♀️ Woman Gesturing 
No: Medium-Light Skin Tone 🙅🏽‍♀️ Woman Gesturing No: Medium Skin Tone 🙅🏾‍♀️ Woman Gesturing No: Medium-Dark Skin Tone 🙅🏿‍♀️ Woman Gesturing No: Dark Skin Tone 🙆 Person Gesturing OK 🙆🏻 Person Gesturing OK: Light Skin Tone 🙆🏼 Person Gesturing OK: Medium-Light Skin Tone 🙆🏽 Person Gesturing OK: Medium Skin Tone 🙆🏾 Person Gesturing OK: Medium-Dark Skin Tone 🙆🏿 Person Gesturing OK: Dark Skin Tone 🙆‍♂️ Man Gesturing OK 🙆🏻‍♂️ Man Gesturing OK: Light Skin Tone 🙆🏼‍♂️ Man Gesturing OK: Medium-Light Skin Tone 🙆🏽‍♂️ Man Gesturing OK: Medium Skin Tone 🙆🏾‍♂️ Man Gesturing OK: Medium-Dark Skin Tone 🙆🏿‍♂️ Man Gesturing OK: Dark Skin Tone 🙆‍♀️ Woman Gesturing OK 🙆🏻‍♀️ Woman Gesturing OK: Light Skin Tone 🙆🏼‍♀️ Woman Gesturing OK: Medium-Light Skin Tone 🙆🏽‍♀️ Woman Gesturing OK: Medium Skin Tone 🙆🏾‍♀️ Woman Gesturing OK: Medium-Dark Skin Tone 🙆🏿‍♀️ Woman Gesturing OK: Dark Skin Tone 💁 Person Tipping Hand 💁🏻 Person Tipping Hand: Light Skin Tone 💁🏼 Person Tipping Hand: Medium-Light Skin Tone 💁🏽 Person Tipping Hand: Medium Skin Tone 💁🏾 Person Tipping Hand: Medium-Dark Skin Tone 💁🏿 Person Tipping Hand: Dark Skin Tone 💁‍♂️ Man Tipping Hand 💁🏻‍♂️ Man Tipping Hand: Light Skin Tone 💁🏼‍♂️ Man Tipping Hand: Medium-Light Skin Tone 💁🏽‍♂️ Man Tipping Hand: Medium Skin Tone 💁🏾‍♂️ Man Tipping Hand: Medium-Dark Skin Tone 💁🏿‍♂️ Man Tipping Hand: Dark Skin Tone 💁‍♀️ Woman Tipping Hand 💁🏻‍♀️ Woman Tipping Hand: Light Skin Tone 💁🏼‍♀️ Woman Tipping Hand: Medium-Light Skin Tone 💁🏽‍♀️ Woman Tipping Hand: Medium Skin Tone 💁🏾‍♀️ Woman Tipping Hand: Medium-Dark Skin Tone 💁🏿‍♀️ Woman Tipping Hand: Dark Skin Tone 🙋 Person Raising Hand 🙋🏻 Person Raising Hand: Light Skin Tone 🙋🏼 Person Raising Hand: Medium-Light Skin Tone 🙋🏽 Person Raising Hand: Medium Skin Tone 🙋🏾 Person Raising Hand: Medium-Dark Skin Tone 🙋🏿 Person Raising Hand: Dark Skin Tone 🙋‍♂️ Man Raising Hand 🙋🏻‍♂️ Man Raising Hand: Light Skin Tone 🙋🏼‍♂️ Man Raising Hand: Medium-Light Skin Tone 🙋🏽‍♂️ Man Raising Hand: Medium Skin Tone 🙋🏾‍♂️ Man Raising 
Hand: Medium-Dark Skin Tone 🙋🏿‍♂️ Man Raising Hand: Dark Skin Tone 🙋‍♀️ Woman Raising Hand 🙋🏻‍♀️ Woman Raising Hand: Light Skin Tone 🙋🏼‍♀️ Woman Raising Hand: Medium-Light Skin Tone 🙋🏽‍♀️ Woman Raising Hand: Medium Skin Tone 🙋🏾‍♀️ Woman Raising Hand: Medium-Dark Skin Tone 🙋🏿‍♀️ Woman Raising Hand: Dark Skin Tone 🙇 Person Bowing 🙇🏻 Person Bowing: Light Skin Tone 🙇🏼 Person Bowing: Medium-Light Skin Tone 🙇🏽 Person Bowing: Medium Skin Tone 🙇🏾 Person Bowing: Medium-Dark Skin Tone 🙇🏿 Person Bowing: Dark Skin Tone 🙇‍♂️ Man Bowing 🙇🏻‍♂️ Man Bowing: Light Skin Tone 🤝🏻 Handshake, Type-1-2 🙇🏼‍♂️ Man Bowing: Medium-Light Skin Tone 🤝🏼 Handshake, Type-3 🤝🏽 Handshake, Type-4 🙇🏽‍♂️ Man Bowing: Medium Skin Tone 🤝🏾 Handshake, Type-5 🤝🏿 Handshake, Type-6 🙇🏾‍♂️ Man Bowing: Medium-Dark Skin Tone 🙇🏿‍♂️ Man Bowing: Dark Skin Tone 🙇‍♀️ Woman Bowing 🙇🏻‍♀️ Woman Bowing: Light Skin Tone 🙇🏼‍♀️ Woman Bowing: Medium-Light Skin Tone 🙇🏽‍♀️ Woman Bowing: Medium Skin Tone 🙇🏾‍♀️ Woman Bowing: Medium-Dark Skin Tone 🙇🏿‍♀️ Woman Bowing: Dark Skin Tone 🤦 Person Facepalming 🤦🏻 Person Facepalming: Light Skin Tone 🤦🏼 Person Facepalming: Medium-Light Skin Tone 🤦🏽 Person Facepalming: Medium Skin Tone 🤦🏾 Person Facepalming: Medium-Dark Skin Tone 🤦🏿 Person Facepalming: Dark Skin Tone 🤦‍♂️ Man Facepalming 🤦🏻‍♂️ Man Facepalming: Light Skin Tone 🤦🏼‍♂️ Man Facepalming: Medium-Light Skin Tone 🤦🏽‍♂️ Man Facepalming: Medium Skin Tone 🤦🏾‍♂️ Man Facepalming: Medium-Dark Skin Tone 🤦🏿‍♂️ Man Facepalming: Dark Skin Tone 🤦‍♀️ Woman Facepalming 🤦🏻‍♀️ Woman Facepalming: Light Skin Tone 🤦🏼‍♀️ Woman Facepalming: Medium-Light Skin Tone 🤦🏽‍♀️ Woman Facepalming: Medium Skin Tone 🤦🏾‍♀️ Woman Facepalming: Medium-Dark Skin Tone 🤦🏿‍♀️ Woman Facepalming: Dark Skin Tone 🤷 Person Shrugging 🤷🏻 Person Shrugging: Light Skin Tone 🤷🏼 Person Shrugging: Medium-Light Skin Tone 🤷🏽 Person Shrugging: Medium Skin Tone 🤷🏾 Person Shrugging: Medium-Dark Skin Tone 🤷🏿 Person Shrugging: Dark Skin Tone 🤷‍♂️ Man Shrugging 🤷🏻‍♂️ Man Shrugging: Light Skin 
Tone 🤷🏼‍♂️ Man Shrugging: Medium-Light Skin Tone 🤷🏽‍♂️ Man Shrugging: Medium Skin Tone 🤷🏾‍♂️ Man Shrugging: Medium-Dark Skin Tone 🤷🏿‍♂️ Man Shrugging: Dark Skin Tone 🤷‍♀️ Woman Shrugging 🤷🏻‍♀️ Woman Shrugging: Light Skin Tone 🤷🏼‍♀️ Woman Shrugging: Medium-Light Skin Tone 🤷🏽‍♀️ Woman Shrugging: Medium Skin Tone 🤷🏾‍♀️ Woman Shrugging: Medium-Dark Skin Tone 🤷🏿‍♀️ Woman Shrugging: Dark Skin Tone 💆 Person Getting Massage 💆🏻 Person Getting Massage: Light Skin Tone 💆🏼 Person Getting Massage: Medium-Light Skin Tone 💆🏽 Person Getting Massage: Medium Skin Tone 💆🏾 Person Getting Massage: Medium-Dark Skin Tone 💆🏿 Person Getting Massage: Dark Skin Tone 💆‍♂️ Man Getting Massage 💆🏻‍♂️ Man Getting Massage: Light Skin Tone 💆🏼‍♂️ Man Getting Massage: Medium-Light Skin Tone 💆🏽‍♂️ Man Getting Massage: Medium Skin Tone 💆🏾‍♂️ Man Getting Massage: Medium-Dark Skin Tone 💆🏿‍♂️ Man Getting Massage: Dark Skin Tone 💆‍♀️ Woman Getting Massage 💆🏻‍♀️ Woman Getting Massage: Light Skin Tone 💆🏼‍♀️ Woman Getting Massage: Medium-Light Skin Tone 💆🏽‍♀️ Woman Getting Massage: Medium Skin Tone 💆🏾‍♀️ Woman Getting Massage: Medium-Dark Skin Tone 💆🏿‍♀️ Woman Getting Massage: Dark Skin Tone 💇 Person Getting Haircut 💇🏻 Person Getting Haircut: Light Skin Tone 💇🏼 Person Getting Haircut: Medium-Light Skin Tone 💇🏽 Person Getting Haircut: Medium Skin Tone 💇🏾 Person Getting Haircut: Medium-Dark Skin Tone 💇🏿 Person Getting Haircut: Dark Skin Tone 💇‍♂️ Man Getting Haircut 💇🏻‍♂️ Man Getting Haircut: Light Skin Tone 💇🏼‍♂️ Man Getting Haircut: Medium-Light Skin Tone 💇🏽‍♂️ Man Getting Haircut: Medium Skin Tone 💇🏾‍♂️ Man Getting Haircut: Medium-Dark Skin Tone 💇🏿‍♂️ Man Getting Haircut: Dark Skin Tone 💇‍♀️ Woman Getting Haircut 💇🏻‍♀️ Woman Getting Haircut: Light Skin Tone 💇🏼‍♀️ Woman Getting Haircut: Medium-Light Skin Tone 💇🏽‍♀️ Woman Getting Haircut: Medium Skin Tone 💇🏾‍♀️ Woman Getting Haircut: Medium-Dark Skin Tone 💇🏿‍♀️ Woman Getting Haircut: Dark Skin Tone 🚶 Person Walking 🚶🏻 Person Walking: Light Skin Tone 🚶🏼 Person 
Walking: Medium-Light Skin Tone 🚶🏽 Person Walking: Medium Skin Tone 🚶🏾 Person Walking: Medium-Dark Skin Tone 🚶🏿 Person Walking: Dark Skin Tone 🚶‍♂️ Man Walking 🚶🏻‍♂️ Man Walking: Light Skin Tone 🚶🏼‍♂️ Man Walking: Medium-Light Skin Tone 🚶🏽‍♂️ Man Walking: Medium Skin Tone 🚶🏾‍♂️ Man Walking: Medium-Dark Skin Tone 🚶🏿‍♂️ Man Walking: Dark Skin Tone 🚶‍♀️ Woman Walking 🚶🏻‍♀️ Woman Walking: Light Skin Tone 🚶🏼‍♀️ Woman Walking: Medium-Light Skin Tone 🚶🏽‍♀️ Woman Walking: Medium Skin Tone 🚶🏾‍♀️ Woman Walking: Medium-Dark Skin Tone 🚶🏿‍♀️ Woman Walking: Dark Skin Tone 🏃 Person Running 🏃🏻 Person Running: Light Skin Tone 🏃🏼 Person Running: Medium-Light Skin Tone 🏃🏽 Person Running: Medium Skin Tone 🏃🏾 Person Running: Medium-Dark Skin Tone 🏃🏿 Person Running: Dark Skin Tone 🏃‍♂️ Man Running 🏃🏻‍♂️ Man Running: Light Skin Tone 🏃🏼‍♂️ Man Running: Medium-Light Skin Tone 🏃🏽‍♂️ Man Running: Medium Skin Tone 🏃🏾‍♂️ Man Running: Medium-Dark Skin Tone 🏃🏿‍♂️ Man Running: Dark Skin Tone 🏃‍♀️ Woman Running 🏃🏻‍♀️ Woman Running: Light Skin Tone 🏃🏼‍♀️ Woman Running: Medium-Light Skin Tone 🏃🏽‍♀️ Woman Running: Medium Skin Tone 🏃🏾‍♀️ Woman Running: Medium-Dark Skin Tone 🏃🏿‍♀️ Woman Running: Dark Skin Tone 💃 Woman Dancing 💃🏻 Woman Dancing: Light Skin Tone 💃🏼 Woman Dancing: Medium-Light Skin Tone 💃🏽 Woman Dancing: Medium Skin Tone 💃🏾 Woman Dancing: Medium-Dark Skin Tone 💃🏿 Woman Dancing: Dark Skin Tone 🕺 Man Dancing 🕺🏻 Man Dancing: Light Skin Tone 🕺🏼 Man Dancing: Medium-Light Skin Tone 🕺🏽 Man Dancing: Medium Skin Tone 🕺🏾 Man Dancing: Medium-Dark Skin Tone 🕺🏿 Man Dancing: Dark Skin Tone 👯 People With Bunny Ears Partying 👯‍♂️ Men With Bunny Ears Partying 👯‍♀️ Women With Bunny Ears Partying 🧖 Person in Steamy Room 🧖🏻 Person in Steamy Room: Light Skin Tone 🧖🏼 Person in Steamy Room: Medium-Light Skin Tone 🧖🏽 Person in Steamy Room: Medium Skin Tone 🧖🏾 Person in Steamy Room: Medium-Dark Skin Tone 🧖🏿 Person in Steamy Room: Dark Skin Tone 🧖‍♀️ Woman in Steamy Room 🧖🏻‍♀️ Woman in Steamy Room: Light Skin Tone 
🧖🏼‍♀️ Woman in Steamy Room: Medium-Light Skin Tone 🧖🏽‍♀️ Woman in Steamy Room: Medium Skin Tone 🧖🏾‍♀️ Woman in Steamy Room: Medium-Dark Skin Tone 🧖🏿‍♀️ Woman in Steamy Room: Dark Skin Tone 🧖‍♂️ Man in Steamy Room 🧖🏻‍♂️ Man in Steamy Room: Light Skin Tone 🧖🏼‍♂️ Man in Steamy Room: Medium-Light Skin Tone 🧖🏽‍♂️ Man in Steamy Room: Medium Skin Tone 🧖🏾‍♂️ Man in Steamy Room: Medium-Dark Skin Tone 🧖🏿‍♂️ Man in Steamy Room: Dark Skin Tone 🧗 Person Climbing 🧗🏻 Person Climbing: Light Skin Tone 🧗🏼 Person Climbing: Medium-Light Skin Tone 🧗🏽 Person Climbing: Medium Skin Tone 🧗🏾 Person Climbing: Medium-Dark Skin Tone 🧗🏿 Person Climbing: Dark Skin Tone 🧗‍♀️ Woman Climbing 🧗🏻‍♀️ Woman Climbing: Light Skin Tone 🧗🏼‍♀️ Woman Climbing: Medium-Light Skin Tone 🧗🏽‍♀️ Woman Climbing: Medium Skin Tone 🧗🏾‍♀️ Woman Climbing: Medium-Dark Skin Tone 🧗🏿‍♀️ Woman Climbing: Dark Skin Tone 🧗‍♂️ Man Climbing 🧗🏻‍♂️ Man Climbing: Light Skin Tone 🧗🏼‍♂️ Man Climbing: Medium-Light Skin Tone 🧗🏽‍♂️ Man Climbing: Medium Skin Tone 🧗🏾‍♂️ Man Climbing: Medium-Dark Skin Tone 🧗🏿‍♂️ Man Climbing: Dark Skin Tone 🧘 Person in Lotus Position 🧘🏻 Person in Lotus Position: Light Skin Tone 🧘🏼 Person in Lotus Position: Medium-Light Skin Tone 🧘🏽 Person in Lotus Position: Medium Skin Tone 🧘🏾 Person in Lotus Position: Medium-Dark Skin Tone 🧘🏿 Person in Lotus Position: Dark Skin Tone 🧘‍♀️ Woman in Lotus Position 🧘🏻‍♀️ Woman in Lotus Position: Light Skin Tone 🧘🏼‍♀️ Woman in Lotus Position: Medium-Light Skin Tone 🧘🏽‍♀️ Woman in Lotus Position: Medium Skin Tone 🧘🏾‍♀️ Woman in Lotus Position: Medium-Dark Skin Tone 🧘🏿‍♀️ Woman in Lotus Position: Dark Skin Tone 🧘‍♂️ Man in Lotus Position 🧘🏻‍♂️ Man in Lotus Position: Light Skin Tone 🧘🏼‍♂️ Man in Lotus Position: Medium-Light Skin Tone 🧘🏽‍♂️ Man in Lotus Position: Medium Skin Tone 🧘🏾‍♂️ Man in Lotus Position: Medium-Dark Skin Tone 🧘🏿‍♂️ Man in Lotus Position: Dark Skin Tone 🛀 Person Taking Bath 🛀🏻 Person Taking Bath: Light Skin Tone 🛀🏼 Person Taking Bath: Medium-Light Skin Tone 🛀🏽 
Person Taking Bath: Medium Skin Tone 🛀🏾 Person Taking Bath: Medium-Dark Skin Tone 🛀🏿 Person Taking Bath: Dark Skin Tone 🛌 Person in Bed 🛌🏻 Person in Bed: Light Skin Tone 🛌🏼 Person in Bed: Medium-Light Skin Tone 🛌🏽 Person in Bed: Medium Skin Tone 🛌🏾 Person in Bed: Medium-Dark Skin Tone 🛌🏿 Person in Bed: Dark Skin Tone 🕴 Man in Business Suit Levitating 🕴🏻 Man in Business Suit Levitating: Light Skin Tone 🕴🏼 Man in Business Suit Levitating: Medium-Light Skin Tone 🕴🏽 Man in Business Suit Levitating: Medium Skin Tone 🕴🏾 Man in Business Suit Levitating: Medium-Dark Skin Tone 🕴🏿 Man in Business Suit Levitating: Dark Skin Tone 🗣 Speaking Head 👤 Bust in Silhouette 👥 Busts in Silhouette 🤺 Person Fencing 🏇 Horse Racing 🏇🏻 Horse Racing: Light Skin Tone 🏇🏼 Horse Racing: Medium-Light Skin Tone 🏇🏽 Horse Racing: Medium Skin Tone 🏇🏾 Horse Racing: Medium-Dark Skin Tone 🏇🏿 Horse Racing: Dark Skin Tone ⛷ Skier 🏂 Snowboarder 🏂🏻 Snowboarder: Light Skin Tone 🏂🏼 Snowboarder: Medium-Light Skin Tone 🏂🏽 Snowboarder: Medium Skin Tone 🏂🏾 Snowboarder: Medium-Dark Skin Tone 🏂🏿 Snowboarder: Dark Skin Tone 🏌 Person Golfing 🏌🏻 Person Golfing: Light Skin Tone 🏌🏼 Person Golfing: Medium-Light Skin Tone 🏌🏽 Person Golfing: Medium Skin Tone 🏌🏾 Person Golfing: Medium-Dark Skin Tone 🏌🏿 Person Golfing: Dark Skin Tone 🏌️‍♂️ Man Golfing 🏌🏻‍♂️ Man Golfing: Light Skin Tone 🏌🏼‍♂️ Man Golfing: Medium-Light Skin Tone 🏌🏽‍♂️ Man Golfing: Medium Skin Tone 🏌🏾‍♂️ Man Golfing: Medium-Dark Skin Tone 🏌🏿‍♂️ Man Golfing: Dark Skin Tone 🏌️‍♀️ Woman Golfing 🏌🏻‍♀️ Woman Golfing: Light Skin Tone 🏌🏼‍♀️ Woman Golfing: Medium-Light Skin Tone 🏌🏽‍♀️ Woman Golfing: Medium Skin Tone 🏌🏾‍♀️ Woman Golfing: Medium-Dark Skin Tone 🏌🏿‍♀️ Woman Golfing: Dark Skin Tone 🏄 Person Surfing 🏄🏻 Person Surfing: Light Skin Tone 🏄🏼 Person Surfing: Medium-Light Skin Tone 🏄🏽 Person Surfing: Medium Skin Tone 🏄🏾 Person Surfing: Medium-Dark Skin Tone 🏄🏿 Person Surfing: Dark Skin Tone 🏄‍♂️ Man Surfing 🏄🏻‍♂️ Man Surfing: Light Skin Tone 🏄🏼‍♂️ Man Surfing: 
Medium-Light Skin Tone 🏄🏽‍♂️ Man Surfing: Medium Skin Tone 🏄🏾‍♂️ Man Surfing: Medium-Dark Skin Tone 🏄🏿‍♂️ Man Surfing: Dark Skin Tone 🏄‍♀️ Woman Surfing 🏄🏻‍♀️ Woman Surfing: Light Skin Tone 🏄🏼‍♀️ Woman Surfing: Medium-Light Skin Tone 🏄🏽‍♀️ Woman Surfing: Medium Skin Tone 🏄🏾‍♀️ Woman Surfing: Medium-Dark Skin Tone 🏄🏿‍♀️ Woman Surfing: Dark Skin Tone 🚣 Person Rowing Boat 🚣🏻 Person Rowing Boat: Light Skin Tone 🚣🏼 Person Rowing Boat: Medium-Light Skin Tone 🚣🏽 Person Rowing Boat: Medium Skin Tone 🚣🏾 Person Rowing Boat: Medium-Dark Skin Tone 🚣🏿 Person Rowing Boat: Dark Skin Tone 🚣‍♂️ Man Rowing Boat 🚣🏻‍♂️ Man Rowing Boat: Light Skin Tone 🚣🏼‍♂️ Man Rowing Boat: Medium-Light Skin Tone 🚣🏽‍♂️ Man Rowing Boat: Medium Skin Tone 🚣🏾‍♂️ Man Rowing Boat: Medium-Dark Skin Tone 🚣🏿‍♂️ Man Rowing Boat: Dark Skin Tone 🚣‍♀️ Woman Rowing Boat 🚣🏻‍♀️ Woman Rowing Boat: Light Skin Tone 🚣🏼‍♀️ Woman Rowing Boat: Medium-Light Skin Tone 🚣🏽‍♀️ Woman Rowing Boat: Medium Skin Tone 🚣🏾‍♀️ Woman Rowing Boat: Medium-Dark Skin Tone 🚣🏿‍♀️ Woman Rowing Boat: Dark Skin Tone 🏊 Person Swimming 🏊🏻 Person Swimming: Light Skin Tone 🏊🏼 Person Swimming: Medium-Light Skin Tone 🏊🏽 Person Swimming: Medium Skin Tone 🏊🏾 Person Swimming: Medium-Dark Skin Tone 🏊🏿 Person Swimming: Dark Skin Tone 🏊‍♂️ Man Swimming 🏊🏻‍♂️ Man Swimming: Light Skin Tone 🏊🏼‍♂️ Man Swimming: Medium-Light Skin Tone 🏊🏽‍♂️ Man Swimming: Medium Skin Tone 🏊🏾‍♂️ Man Swimming: Medium-Dark Skin Tone 🏊🏿‍♂️ Man Swimming: Dark Skin Tone 🏊‍♀️ Woman Swimming 🏊🏻‍♀️ Woman Swimming: Light Skin Tone 🏊🏼‍♀️ Woman Swimming: Medium-Light Skin Tone 🏊🏽‍♀️ Woman Swimming: Medium Skin Tone 🏊🏾‍♀️ Woman Swimming: Medium-Dark Skin Tone 🏊🏿‍♀️ Woman Swimming: Dark Skin Tone ⛹ Person Bouncing Ball ⛹🏻 Person Bouncing Ball: Light Skin Tone ⛹🏼 Person Bouncing Ball: Medium-Light Skin Tone ⛹🏽 Person Bouncing Ball: Medium Skin Tone ⛹🏾 Person Bouncing Ball: Medium-Dark Skin Tone ⛹🏿 Person Bouncing Ball: Dark Skin Tone ⛹️‍♂️ Man Bouncing Ball ⛹🏻‍♂️ Man Bouncing Ball: Light Skin 
Tone ⛹🏼‍♂️ Man Bouncing Ball: Medium-Light Skin Tone ⛹🏽‍♂️ Man Bouncing Ball: Medium Skin Tone ⛹🏾‍♂️ Man Bouncing Ball: Medium-Dark Skin Tone ⛹🏿‍♂️ Man Bouncing Ball: Dark Skin Tone ⛹️‍♀️ Woman Bouncing Ball ⛹🏻‍♀️ Woman Bouncing Ball: Light Skin Tone ⛹🏼‍♀️ Woman Bouncing Ball: Medium-Light Skin Tone ⛹🏽‍♀️ Woman Bouncing Ball: Medium Skin Tone ⛹🏾‍♀️ Woman Bouncing Ball: Medium-Dark Skin Tone ⛹🏿‍♀️ Woman Bouncing Ball: Dark Skin Tone 🏋 Person Lifting Weights 🏋🏻 Person Lifting Weights: Light Skin Tone 🏋🏼 Person Lifting Weights: Medium-Light Skin Tone 🏋🏽 Person Lifting Weights: Medium Skin Tone 🏋🏾 Person Lifting Weights: Medium-Dark Skin Tone 🏋🏿 Person Lifting Weights: Dark Skin Tone 🏋️‍♂️ Man Lifting Weights 🏋🏻‍♂️ Man Lifting Weights: Light Skin Tone 🏋🏼‍♂️ Man Lifting Weights: Medium-Light Skin Tone 🏋🏽‍♂️ Man Lifting Weights: Medium Skin Tone 🏋🏾‍♂️ Man Lifting Weights: Medium-Dark Skin Tone 🏋🏿‍♂️ Man Lifting Weights: Dark Skin Tone 🏋️‍♀️ Woman Lifting Weights 🏋🏻‍♀️ Woman Lifting Weights: Light Skin Tone 🏋🏼‍♀️ Woman Lifting Weights: Medium-Light Skin Tone 🏋🏽‍♀️ Woman Lifting Weights: Medium Skin Tone 🏋🏾‍♀️ Woman Lifting Weights: Medium-Dark Skin Tone 🏋🏿‍♀️ Woman Lifting Weights: Dark Skin Tone 🚴 Person Biking 🚴🏻 Person Biking: Light Skin Tone 🚴🏼 Person Biking: Medium-Light Skin Tone 🚴🏽 Person Biking: Medium Skin Tone 🚴🏾 Person Biking: Medium-Dark Skin Tone 🚴🏿 Person Biking: Dark Skin Tone 🚴‍♂️ Man Biking 🚴🏻‍♂️ Man Biking: Light Skin Tone 🚴🏼‍♂️ Man Biking: Medium-Light Skin Tone 🚴🏽‍♂️ Man Biking: Medium Skin Tone 🚴🏾‍♂️ Man Biking: Medium-Dark Skin Tone 🚴🏿‍♂️ Man Biking: Dark Skin Tone 🚴‍♀️ Woman Biking 🚴🏻‍♀️ Woman Biking: Light Skin Tone 🚴🏼‍♀️ Woman Biking: Medium-Light Skin Tone 🚴🏽‍♀️ Woman Biking: Medium Skin Tone 🚴🏾‍♀️ Woman Biking: Medium-Dark Skin Tone 🚴🏿‍♀️ Woman Biking: Dark Skin Tone 🚵 Person Mountain Biking 🚵🏻 Person Mountain Biking: Light Skin Tone 🚵🏼 Person Mountain Biking: Medium-Light Skin Tone 🚵🏽 Person Mountain Biking: Medium Skin Tone 🚵🏾 Person Mountain 
Biking: Medium-Dark Skin Tone 🚵🏿 Person Mountain Biking: Dark Skin Tone 🚵‍♂️ Man Mountain Biking 🚵🏻‍♂️ Man Mountain Biking: Light Skin Tone 🚵🏼‍♂️ Man Mountain Biking: Medium-Light Skin Tone 🚵🏽‍♂️ Man Mountain Biking: Medium Skin Tone 🚵🏾‍♂️ Man Mountain Biking: Medium-Dark Skin Tone 🚵🏿‍♂️ Man Mountain Biking: Dark Skin Tone 🚵‍♀️ Woman Mountain Biking 🚵🏻‍♀️ Woman Mountain Biking: Light Skin Tone 🚵🏼‍♀️ Woman Mountain Biking: Medium-Light Skin Tone 🚵🏽‍♀️ Woman Mountain Biking: Medium Skin Tone 🚵🏾‍♀️ Woman Mountain Biking: Medium-Dark Skin Tone 🚵🏿‍♀️ Woman Mountain Biking: Dark Skin Tone 🏎 Racing Car 🏍 Motorcycle 🤸 Person Cartwheeling 🤸🏻 Person Cartwheeling: Light Skin Tone 🤸🏼 Person Cartwheeling: Medium-Light Skin Tone 🤸🏽 Person Cartwheeling: Medium Skin Tone 🤸🏾 Person Cartwheeling: Medium-Dark Skin Tone 🤸🏿 Person Cartwheeling: Dark Skin Tone 🤸‍♂️ Man Cartwheeling 🤸🏻‍♂️ Man Cartwheeling: Light Skin Tone 🤸🏼‍♂️ Man Cartwheeling: Medium-Light Skin Tone 🤸🏽‍♂️ Man Cartwheeling: Medium Skin Tone 🤸🏾‍♂️ Man Cartwheeling: Medium-Dark Skin Tone 🤸🏿‍♂️ Man Cartwheeling: Dark Skin Tone 🤸‍♀️ Woman Cartwheeling 🤸🏻‍♀️ Woman Cartwheeling: Light Skin Tone 🤸🏼‍♀️ Woman Cartwheeling: Medium-Light Skin Tone 🤸🏽‍♀️ Woman Cartwheeling: Medium Skin Tone 🤸🏾‍♀️ Woman Cartwheeling: Medium-Dark Skin Tone 🤸🏿‍♀️ Woman Cartwheeling: Dark Skin Tone 🤼 People Wrestling 🤼‍♂️ Men Wrestling 🤼‍♀️ Women Wrestling 🤽 Person Playing Water Polo 🤽🏻 Person Playing Water Polo: Light Skin Tone 🤽🏼 Person Playing Water Polo: Medium-Light Skin Tone 🤽🏽 Person Playing Water Polo: Medium Skin Tone 🤽🏾 Person Playing Water Polo: Medium-Dark Skin Tone 🤽🏿 Person Playing Water Polo: Dark Skin Tone 🤽‍♂️ Man Playing Water Polo 🤽🏻‍♂️ Man Playing Water Polo: Light Skin Tone 🤽🏼‍♂️ Man Playing Water Polo: Medium-Light Skin Tone 🤽🏽‍♂️ Man Playing Water Polo: Medium Skin Tone 🤽🏾‍♂️ Man Playing Water Polo: Medium-Dark Skin Tone 🤽🏿‍♂️ Man Playing Water Polo: Dark Skin Tone 🤽‍♀️ Woman Playing Water Polo 🤽🏻‍♀️ Woman Playing Water Polo: 
Light Skin Tone 🤽🏼‍♀️ Woman Playing Water Polo: Medium-Light Skin Tone 🤽🏽‍♀️ Woman Playing Water Polo: Medium Skin Tone 🤽🏾‍♀️ Woman Playing Water Polo: Medium-Dark Skin Tone 🤽🏿‍♀️ Woman Playing Water Polo: Dark Skin Tone 🤾 Person Playing Handball 🤾🏻 Person Playing Handball: Light Skin Tone 🤾🏼 Person Playing Handball: Medium-Light Skin Tone 🤾🏽 Person Playing Handball: Medium Skin Tone 🤾🏾 Person Playing Handball: Medium-Dark Skin Tone 🤾🏿 Person Playing Handball: Dark Skin Tone 🤾‍♂️ Man Playing Handball 🤾🏻‍♂️ Man Playing Handball: Light Skin Tone 🤾🏼‍♂️ Man Playing Handball: Medium-Light Skin Tone 🤾🏽‍♂️ Man Playing Handball: Medium Skin Tone 🤾🏾‍♂️ Man Playing Handball: Medium-Dark Skin Tone 🤾🏿‍♂️ Man Playing Handball: Dark Skin Tone 🤾‍♀️ Woman Playing Handball 🤾🏻‍♀️ Woman Playing Handball: Light Skin Tone 🤾🏼‍♀️ Woman Playing Handball: Medium-Light Skin Tone 🤾🏽‍♀️ Woman Playing Handball: Medium Skin Tone 🤾🏾‍♀️ Woman Playing Handball: Medium-Dark Skin Tone 🤾🏿‍♀️ Woman Playing Handball: Dark Skin Tone 🤹 Person Juggling 🤹🏻 Person Juggling: Light Skin Tone 🤹🏼 Person Juggling: Medium-Light Skin Tone 🤹🏽 Person Juggling: Medium Skin Tone 🤹🏾 Person Juggling: Medium-Dark Skin Tone 🤹🏿 Person Juggling: Dark Skin Tone 🤹‍♂️ Man Juggling 🤹🏻‍♂️ Man Juggling: Light Skin Tone 🤹🏼‍♂️ Man Juggling: Medium-Light Skin Tone 🤹🏽‍♂️ Man Juggling: Medium Skin Tone 🤹🏾‍♂️ Man Juggling: Medium-Dark Skin Tone 🤹🏿‍♂️ Man Juggling: Dark Skin Tone 🤹‍♀️ Woman Juggling 🤹🏻‍♀️ Woman Juggling: Light Skin Tone 🤹🏼‍♀️ Woman Juggling: Medium-Light Skin Tone 🤹🏽‍♀️ Woman Juggling: Medium Skin Tone 🤹🏾‍♀️ Woman Juggling: Medium-Dark Skin Tone 🤹🏿‍♀️ Woman Juggling: Dark Skin Tone 🤼🏻 Wrestlers, Type-1-2 🤼🏼 Wrestlers, Type-3 👫 Man and Woman Holding Hands 🤼🏽 Wrestlers, Type-4 👬 Two Men Holding Hands 🤼🏾 Wrestlers, Type-5 👭 Two Women Holding Hands 🤼🏿 Wrestlers, Type-6 💏 Kiss 👩‍❤️‍💋‍👨 Kiss: Woman, Man 🤼🏻‍♂️ Men Wrestling, Type-1-2 🤼🏼‍♂️ Men Wrestling, Type-3 🤼🏽‍♂️ Men Wrestling, Type-4 👨‍❤️‍💋‍👨 Kiss: Man, Man 🤼🏾‍♂️ Men 
Wrestling, Type-5 🤼🏿‍♂️ Men Wrestling, Type-6 👩‍❤️‍💋‍👩 Kiss: Woman, Woman 🤼🏻‍♀️ Women Wrestling, Type-1-2 💑 Couple With Heart 🤼🏼‍♀️ Women Wrestling, Type-3 👩‍❤️‍👨 Couple With Heart: Woman, Man 🤼🏽‍♀️ Women Wrestling, Type-4 🤼🏾‍♀️ Women Wrestling, Type-5 👨‍❤️‍👨 Couple With Heart: Man, Man 🤼🏿‍♀️ Women Wrestling, Type-6 👩‍❤️‍👩 Couple With Heart: Woman, Woman 👪 Family 👨‍👩‍👦 Family: Man, Woman, Boy 👨‍👩‍👧 Family: Man, Woman, Girl 👨‍👩‍👧‍👦 Family: Man, Woman, Girl, Boy 👨‍👩‍👦‍👦 Family: Man, Woman, Boy, Boy 👨‍👩‍👧‍👧 Family: Man, Woman, Girl, Girl 👨‍👨‍👦 Family: Man, Man, Boy 👨‍👨‍👧 Family: Man, Man, Girl 👨‍👨‍👧‍👦 Family: Man, Man, Girl, Boy 👨‍👨‍👦‍👦 Family: Man, Man, Boy, Boy 👨‍👨‍👧‍👧 Family: Man, Man, Girl, Girl 👩‍👩‍👦 Family: Woman, Woman, Boy 👩‍👩‍👧 Family: Woman, Woman, Girl 👩‍👩‍👧‍👦 Family: Woman, Woman, Girl, Boy 👩‍👩‍👦‍👦 Family: Woman, Woman, Boy, Boy 👩‍👩‍👧‍👧 Family: Woman, Woman, Girl, Girl 👨‍👦 Family: Man, Boy 👨‍👦‍👦 Family: Man, Boy, Boy 👨‍👧 Family: Man, Girl 👨‍👧‍👦 Family: Man, Girl, Boy 👨‍👧‍👧 Family: Man, Girl, Girl 👩‍👦 Family: Woman, Boy 👩‍👦‍👦 Family: Woman, Boy, Boy 👩‍👧 Family: Woman, Girl 👩‍👧‍👦 Family: Woman, Girl, Boy 👩‍👧‍👧 Family: Woman, Girl, Girl 🤳 Selfie 🤳🏻 Selfie: Light Skin Tone 🤳🏼 Selfie: Medium-Light Skin Tone 🤳🏽 Selfie: Medium Skin Tone 🤳🏾 Selfie: Medium-Dark Skin Tone 🤳🏿 Selfie: Dark Skin Tone 💪 Flexed Biceps 💪🏻 Flexed Biceps: Light Skin Tone 💪🏼 Flexed Biceps: Medium-Light Skin Tone 💪🏽 Flexed Biceps: Medium Skin Tone 💪🏾 Flexed Biceps: Medium-Dark Skin Tone 💪🏿 Flexed Biceps: Dark Skin Tone 👈 Backhand Index Pointing Left 👈🏻 Backhand Index Pointing Left: Light Skin Tone 👈🏼 Backhand Index Pointing Left: Medium-Light Skin Tone 👈🏽 Backhand Index Pointing Left: Medium Skin Tone 👈🏾 Backhand Index Pointing Left: Medium-Dark Skin Tone 👈🏿 Backhand Index Pointing Left: Dark Skin Tone 👉 Backhand Index Pointing Right 👉🏻 Backhand Index Pointing Right: Light Skin Tone 👉🏼 Backhand Index Pointing Right: Medium-Light Skin Tone 👉🏽 Backhand Index Pointing Right: Medium Skin Tone 👉🏾 
Backhand Index Pointing Right: Medium-Dark Skin Tone 👉🏿 Backhand Index Pointing Right: Dark Skin Tone ☝ Index Pointing Up ☝🏻 Index Pointing Up: Light Skin Tone ☝🏼 Index Pointing Up: Medium-Light Skin Tone ☝🏽 Index Pointing Up: Medium Skin Tone ☝🏾 Index Pointing Up: Medium-Dark Skin Tone ☝🏿 Index Pointing Up: Dark Skin Tone 👆 Backhand Index Pointing Up 👆🏻 Backhand Index Pointing Up: Light Skin Tone 👆🏼 Backhand Index Pointing Up: Medium-Light Skin Tone 👆🏽 Backhand Index Pointing Up: Medium Skin Tone 👆🏾 Backhand Index Pointing Up: Medium-Dark Skin Tone 👆🏿 Backhand Index Pointing Up: Dark Skin Tone 🖕 Middle Finger 🖕🏻 Middle Finger: Light Skin Tone 🖕🏼 Middle Finger: Medium-Light Skin Tone 🖕🏽 Middle Finger: Medium Skin Tone 🖕🏾 Middle Finger: Medium-Dark Skin Tone 🖕🏿 Middle Finger: Dark Skin Tone 👇 Backhand Index Pointing Down 👇🏻 Backhand Index Pointing Down: Light Skin Tone 👇🏼 Backhand Index Pointing Down: Medium-Light Skin Tone 👇🏽 Backhand Index Pointing Down: Medium Skin Tone 👇🏾 Backhand Index Pointing Down: Medium-Dark Skin Tone 👇🏿 Backhand Index Pointing Down: Dark Skin Tone ✌ Victory Hand ✌🏻 Victory Hand: Light Skin Tone ✌🏼 Victory Hand: Medium-Light Skin Tone ✌🏽 Victory Hand: Medium Skin Tone ✌🏾 Victory Hand: Medium-Dark Skin Tone ✌🏿 Victory Hand: Dark Skin Tone 🤞 Crossed Fingers 🤞🏻 Crossed Fingers: Light Skin Tone 🤞🏼 Crossed Fingers: Medium-Light Skin Tone 🤞🏽 Crossed Fingers: Medium Skin Tone 🤞🏾 Crossed Fingers: Medium-Dark Skin Tone 🤞🏿 Crossed Fingers: Dark Skin Tone 🖖 Vulcan Salute 🖖🏻 Vulcan Salute: Light Skin Tone 🖖🏼 Vulcan Salute: Medium-Light Skin Tone 🖖🏽 Vulcan Salute: Medium Skin Tone 🖖🏾 Vulcan Salute: Medium-Dark Skin Tone 🖖🏿 Vulcan Salute: Dark Skin Tone 🤘 Sign of the Horns 🤘🏻 Sign of the Horns: Light Skin Tone 🤘🏼 Sign of the Horns: Medium-Light Skin Tone 🤘🏽 Sign of the Horns: Medium Skin Tone 🤘🏾 Sign of the Horns: Medium-Dark Skin Tone 🤘🏿 Sign of the Horns: Dark Skin Tone 🤙 Call Me Hand 🤙🏻 Call Me Hand: Light Skin Tone 🤙🏼 Call Me Hand: Medium-Light Skin 
Tone 🤙🏽 Call Me Hand: Medium Skin Tone 🤙🏾 Call Me Hand: Medium-Dark Skin Tone 🤙🏿 Call Me Hand: Dark Skin Tone 🖐 Raised Hand With Fingers Splayed 🖐🏻 Raised Hand With Fingers Splayed: Light Skin Tone 🖐🏼 Raised Hand With Fingers Splayed: Medium-Light Skin Tone 🖐🏽 Raised Hand With Fingers Splayed: Medium Skin Tone 🖐🏾 Raised Hand With Fingers Splayed: Medium-Dark Skin Tone 🖐🏿 Raised Hand With Fingers Splayed: Dark Skin Tone ✋ Raised Hand ✋🏻 Raised Hand: Light Skin Tone ✋🏼 Raised Hand: Medium-Light Skin Tone ✋🏽 Raised Hand: Medium Skin Tone ✋🏾 Raised Hand: Medium-Dark Skin Tone ✋🏿 Raised Hand: Dark Skin Tone 👌 OK Hand 👌🏻 OK Hand: Light Skin Tone 👌🏼 OK Hand: Medium-Light Skin Tone 👌🏽 OK Hand: Medium Skin Tone 👌🏾 OK Hand: Medium-Dark Skin Tone 👌🏿 OK Hand: Dark Skin Tone 👍 Thumbs Up 👍🏻 Thumbs Up: Light Skin Tone 👍🏼 Thumbs Up: Medium-Light Skin Tone 👍🏽 Thumbs Up: Medium Skin Tone 👍🏾 Thumbs Up: Medium-Dark Skin Tone 👍🏿 Thumbs Up: Dark Skin Tone 👎 Thumbs Down 👎🏻 Thumbs Down: Light Skin Tone 👎🏼 Thumbs Down: Medium-Light Skin Tone 👎🏽 Thumbs Down: Medium Skin Tone 👎🏾 Thumbs Down: Medium-Dark Skin Tone 👎🏿 Thumbs Down: Dark Skin Tone ✊ Raised Fist ✊🏻 Raised Fist: Light Skin Tone ✊🏼 Raised Fist: Medium-Light Skin Tone ✊🏽 Raised Fist: Medium Skin Tone ✊🏾 Raised Fist: Medium-Dark Skin Tone ✊🏿 Raised Fist: Dark Skin Tone 👊 Oncoming Fist 👊🏻 Oncoming Fist: Light Skin Tone 👊🏼 Oncoming Fist: Medium-Light Skin Tone 👊🏽 Oncoming Fist: Medium Skin Tone 👊🏾 Oncoming Fist: Medium-Dark Skin Tone 👊🏿 Oncoming Fist: Dark Skin Tone 🤛 Left-Facing Fist 🤛🏻 Left-Facing Fist: Light Skin Tone 🤛🏼 Left-Facing Fist: Medium-Light Skin Tone 🤛🏽 Left-Facing Fist: Medium Skin Tone 🤛🏾 Left-Facing Fist: Medium-Dark Skin Tone 🤛🏿 Left-Facing Fist: Dark Skin Tone 🤜 Right-Facing Fist 🤜🏻 Right-Facing Fist: Light Skin Tone 🤜🏼 Right-Facing Fist: Medium-Light Skin Tone 🤜🏽 Right-Facing Fist: Medium Skin Tone 🤜🏾 Right-Facing Fist: Medium-Dark Skin Tone 🤜🏿 Right-Facing Fist: Dark Skin Tone 🤚 Raised Back of Hand 🤚🏻 Raised Back 
of Hand: Light Skin Tone 🤚🏼 Raised Back of Hand: Medium-Light Skin Tone 🤚🏽 Raised Back of Hand: Medium Skin Tone 🤚🏾 Raised Back of Hand: Medium-Dark Skin Tone 🤚🏿 Raised Back of Hand: Dark Skin Tone 👋 Waving Hand 👋🏻 Waving Hand: Light Skin Tone 👋🏼 Waving Hand: Medium-Light Skin Tone 👋🏽 Waving Hand: Medium Skin Tone 👋🏾 Waving Hand: Medium-Dark Skin Tone 👋🏿 Waving Hand: Dark Skin Tone 🤟 Love-You Gesture 🤟🏻 Love-You Gesture: Light Skin Tone 🤟🏼 Love-You Gesture: Medium-Light Skin Tone 🤟🏽 Love-You Gesture: Medium Skin Tone 🤟🏾 Love-You Gesture: Medium-Dark Skin Tone 🤟🏿 Love-You Gesture: Dark Skin Tone ✍ Writing Hand ✍🏻 Writing Hand: Light Skin Tone ✍🏼 Writing Hand: Medium-Light Skin Tone ✍🏽 Writing Hand: Medium Skin Tone ✍🏾 Writing Hand: Medium-Dark Skin Tone ✍🏿 Writing Hand: Dark Skin Tone 👏 Clapping Hands 👏🏻 Clapping Hands: Light Skin Tone 👏🏼 Clapping Hands: Medium-Light Skin Tone 👏🏽 Clapping Hands: Medium Skin Tone 👏🏾 Clapping Hands: Medium-Dark Skin Tone 👏🏿 Clapping Hands: Dark Skin Tone 👐 Open Hands 👐🏻 Open Hands: Light Skin Tone 👐🏼 Open Hands: Medium-Light Skin Tone 👐🏽 Open Hands: Medium Skin Tone 👐🏾 Open Hands: Medium-Dark Skin Tone 👐🏿 Open Hands: Dark Skin Tone 🙌 Raising Hands 🙌🏻 Raising Hands: Light Skin Tone 🙌🏼 Raising Hands: Medium-Light Skin Tone 🙌🏽 Raising Hands: Medium Skin Tone 🙌🏾 Raising Hands: Medium-Dark Skin Tone 🙌🏿 Raising Hands: Dark Skin Tone 🤲 Palms Up Together 🤲🏻 Palms Up Together: Light Skin Tone 🤲🏼 Palms Up Together: Medium-Light Skin Tone 🤲🏽 Palms Up Together: Medium Skin Tone 🤲🏾 Palms Up Together: Medium-Dark Skin Tone 🤲🏿 Palms Up Together: Dark Skin Tone 🙏 Folded Hands 🙏🏻 Folded Hands: Light Skin Tone 🙏🏼 Folded Hands: Medium-Light Skin Tone 🙏🏽 Folded Hands: Medium Skin Tone 🙏🏾 Folded Hands: Medium-Dark Skin Tone 🙏🏿 Folded Hands: Dark Skin Tone 🤝 Handshake 💅 Nail Polish 💅🏻 Nail Polish: Light Skin Tone 💅🏼 Nail Polish: Medium-Light Skin Tone 💅🏽 Nail Polish: Medium Skin Tone 💅🏾 Nail Polish: Medium-Dark Skin Tone 💅🏿 Nail Polish: Dark Skin Tone 👂 
Ear 👂🏻 Ear: Light Skin Tone 👂🏼 Ear: Medium-Light Skin Tone 👂🏽 Ear: Medium Skin Tone 👂🏾 Ear: Medium-Dark Skin Tone 👂🏿 Ear: Dark Skin Tone 👃 Nose 👃🏻 Nose: Light Skin Tone 👃🏼 Nose: Medium-Light Skin Tone 👃🏽 Nose: Medium Skin Tone 👃🏾 Nose: Medium-Dark Skin Tone 👃🏿 Nose: Dark Skin Tone 👣 Footprints 👀 Eyes 👁 Eye 👁️‍🗨️ Eye in Speech Bubble 🧠 Brain 👅 Tongue 👄 Mouth 💋 Kiss Mark 💘 Heart With Arrow ❤ Red Heart 💓 Beating Heart 💔 Broken Heart 💕 Two Hearts 💖 Sparkling Heart 💗 Growing Heart 💙 Blue Heart 💚 Green Heart 💛 Yellow Heart 🧡 Orange Heart 💜 Purple Heart 🖤 Black Heart 💝 Heart With Ribbon 💞 Revolving Hearts 💟 Heart Decoration ❣ Heavy Heart Exclamation 💌 Love Letter 💤 Zzz 💢 Anger Symbol 💣 Bomb 💥 Collision 💦 Sweat Droplets 💨 Dashing Away 💫 Dizzy 💬 Speech Balloon 🗨 Left Speech Bubble 🗯 Right Anger Bubble 💭 Thought Balloon 🕳 Hole 👓 Glasses 🕶 Sunglasses 👔 Necktie 👕 T-Shirt 👖 Jeans 🧣 Scarf 🧤 Gloves 🧥 Coat 🧦 Socks 👗 Dress 👘 Kimono 👙 Bikini 👚 Woman’s Clothes 👛 Purse 👜 Handbag 👝 Clutch Bag 🛍 Shopping Bags 🎒 School Backpack 👞 Man’s Shoe 👟 Running Shoe 👠 High-Heeled Shoe 👡 Woman’s Sandal 👢 Woman’s Boot 👑 Crown 👒 Woman’s Hat 🎩 Top Hat 🎓 Graduation Cap 🧢 Billed Cap ⛑ Rescue Worker’s Helmet 📿 Prayer Beads 💄 Lipstick 💍 Ring 💎 Gem Stone 🐵 Monkey Face 🐒 Monkey 🦍 Gorilla 🐶 Dog Face 🐕 Dog 🐩 Poodle 🐺 Wolf Face 🦊 Fox Face 🐱 Cat Face 🐈 Cat 🦁 Lion Face 🐯 Tiger Face 🐅 Tiger 🐆 Leopard 🐴 Horse Face 🐎 Horse 🦄 Unicorn Face 🦓 Zebra 🦌 Deer 🐮 Cow Face 🐂 Ox 🐃 Water Buffalo 🐄 Cow 🐷 Pig Face 🐖 Pig 🐗 Boar 🐽 Pig Nose 🐏 Ram 🐑 Ewe 🐐 Goat 🐪 Camel 🐫 Two-Hump Camel 🦒 Giraffe 🐘 Elephant 🦏 Rhinoceros 🐭 Mouse Face 🐁 Mouse 🐀 Rat 🐹 Hamster Face 🐰 Rabbit Face 🐇 Rabbit 🐿 Chipmunk 🦔 Hedgehog 🦇 Bat 🐻 Bear Face 🐨 Koala 🐼 Panda Face 🐾 Paw Prints 🦃 Turkey 🐔 Chicken 🐓 Rooster 🐣 Hatching Chick 🐤 Baby Chick 🐥 Front-Facing Baby Chick 🐦 Bird 🐧 Penguin 🕊 Dove 🦅 Eagle 🦆 Duck 🦉 Owl 🐸 Frog Face 🐊 Crocodile 🐢 Turtle 🦎 Lizard 🐍 Snake 🐲 Dragon Face 🐉 Dragon 🦕 Sauropod 🦖 T-Rex 🐳 Spouting Whale 🐋 Whale 🐬 Dolphin 🐟 Fish 🐠 Tropical Fish 🐡 
Blowfish 🦈 Shark 🐙 Octopus 🐚 Spiral Shell 🦀 Crab 🦐 Shrimp 🦑 Squid 🐌 Snail 🦋 Butterfly 🐛 Bug 🐜 Ant 🐝 Honeybee 🐞 Lady Beetle 🦗 Cricket 🕷 Spider 🕸 Spider Web 🦂 Scorpion 💐 Bouquet 🌸 Cherry Blossom 💮 White Flower 🏵 Rosette 🌹 Rose 🥀 Wilted Flower 🌺 Hibiscus 🌻 Sunflower 🌼 Blossom 🌷 Tulip 🌱 Seedling 🌲 Evergreen Tree 🌳 Deciduous Tree 🌴 Palm Tree 🌵 Cactus 🌾 Sheaf of Rice 🌿 Herb ☘ Shamrock 🍀 Four Leaf Clover 🍁 Maple Leaf 🍂 Fallen Leaf 🍃 Leaf Fluttering in Wind 🍇 Grapes 🍈 Melon 🍉 Watermelon 🍊 Tangerine 🍋 Lemon 🍌 Banana 🍍 Pineapple 🍎 Red Apple 🍏 Green Apple 🍐 Pear 🍑 Peach 🍒 Cherries 🍓 Strawberry 🥝 Kiwi Fruit 🍅 Tomato 🥥 Coconut 🥑 Avocado 🍆 Eggplant 🥔 Potato 🥕 Carrot 🌽 Ear of Corn 🌶 Hot Pepper 🥒 Cucumber 🥦 Broccoli 🍄 Mushroom 🥜 Peanuts 🌰 Chestnut 🍞 Bread 🥐 Croissant 🥖 Baguette Bread 🥨 Pretzel 🥞 Pancakes 🧀 Cheese Wedge 🍖 Meat on Bone 🍗 Poultry Leg 🥩 Cut of Meat 🥓 Bacon 🍔 Hamburger 🍟 French Fries 🍕 Pizza 🌭 Hot Dog 🥪 Sandwich 🌮 Taco 🌯 Burrito 🥙 Stuffed Flatbread 🥚 Egg 🍳 Cooking 🥘 Shallow Pan of Food 🍲 Pot of Food 🥣 Bowl With Spoon 🥗 Green Salad 🍿 Popcorn 🥫 Canned Food 🍱 Bento Box 🍘 Rice Cracker 🍙 Rice Ball 🍚 Cooked Rice 🍛 Curry Rice 🍜 Steaming Bowl 🍝 Spaghetti 🍠 Roasted Sweet Potato 🍢 Oden 🍣 Sushi 🍤 Fried Shrimp 🍥 Fish Cake With Swirl 🍡 Dango 🥟 Dumpling 🥠 Fortune Cookie 🥡 Takeout Box 🍦 Soft Ice Cream 🍧 Shaved Ice 🍨 Ice Cream 🍩 Doughnut 🍪 Cookie 🎂 Birthday Cake 🍰 Shortcake 🥧 Pie 🍫 Chocolate Bar 🍬 Candy 🍭 Lollipop 🍮 Custard 🍯 Honey Pot 🍼 Baby Bottle 🥛 Glass of Milk ☕ Hot Beverage 🍵 Teacup Without Handle 🍶 Sake 🍾 Bottle With Popping Cork 🍷 Wine Glass 🍸 Cocktail Glass 🍹 Tropical Drink 🍺 Beer Mug 🍻 Clinking Beer Mugs 🥂 Clinking Glasses 🥃 Tumbler Glass 🥤 Cup With Straw 🥢 Chopsticks 🍽 Fork and Knife With Plate 🍴 Fork and Knife 🥄 Spoon 🔪 Kitchen Knife 🏺 Amphora 🌍 Globe Showing Europe-Africa 🌎 Globe Showing Americas 🌏 Globe Showing Asia-Australia 🌐 Globe With Meridians 🗺 World Map 🗾 Map of Japan 🏔 Snow-Capped Mountain ⛰ Mountain 🌋 Volcano 🗻 Mount Fuji 🏕 Camping 🏖 Beach With Umbrella 🏜 Desert 
🏝 Desert Island 🏞 National Park 🏟 Stadium 🏛 Classical Building 🏗 Building Construction 🏘 House 🏙 Cityscape 🏚 Derelict House 🏠 House 🏡 House With Garden 🏢 Office Building 🏣 Japanese Post Office 🏤 Post Office 🏥 Hospital 🏦 Bank 🏨 Hotel 🏩 Love Hotel 🏪 Convenience Store 🏫 School 🏬 Department Store 🏭 Factory 🏯 Japanese Castle 🏰 Castle 💒 Wedding 🗼 Tokyo Tower 🗽 Statue of Liberty ⛪ Church 🕌 Mosque 🕍 Synagogue ⛩ Shinto Shrine 🕋 Kaaba ⛲ Fountain ⛺ Tent 🌁 Foggy 🌃 Night With Stars 🌄 Sunrise Over Mountains 🌅 Sunrise 🌆 Cityscape at Dusk 🌇 Sunset 🌉 Bridge at Night ♨ Hot Springs 🌌 Milky Way 🎠 Carousel Horse 🎡 Ferris Wheel 🎢 Roller Coaster 💈 Barber Pole 🎪 Circus Tent 🎭 Performing Arts 🖼 Framed Picture 🎨 Artist Palette 🎰 Slot Machine 🚂 Locomotive 🚃 Railway Car 🚄 High-Speed Train 🚅 High-Speed Train With Bullet Nose 🚆 Train 🚇 Metro 🚈 Light Rail 🚉 Station 🚊 Tram 🚝 Monorail 🚞 Mountain Railway 🚋 Tram Car 🚌 Bus 🚍 Oncoming Bus 🚎 Trolleybus 🚐 Minibus 🚑 Ambulance 🚒 Fire Engine 🚓 Police Car 🚔 Oncoming Police Car 🚕 Taxi 🚖 Oncoming Taxi 🚗 Automobile 🚘 Oncoming Automobile 🚙 Sport Utility Vehicle 🚚 Delivery Truck 🚛 Articulated Lorry 🚜 Tractor 🚲 Bicycle 🛴 Kick Scooter 🛵 Motor Scooter 🚏 Bus Stop 🛣 Motorway 🛤 Railway Track ⛽ Fuel Pump 🚨 Police Car Light 🚥 Horizontal Traffic Light 🚦 Vertical Traffic Light 🚧 Construction 🛑 Stop Sign ⚓ Anchor ⛵ Sailboat 🛶 Canoe 🚤 Speedboat 🛳 Passenger Ship ⛴ Ferry 🛥 Motor Boat 🚢 Ship ✈ Airplane 🛩 Small Airplane 🛫 Airplane Departure 🛬 Airplane Arrival 💺 Seat 🚁 Helicopter 🚟 Suspension Railway 🚠 Mountain Cableway 🚡 Aerial Tramway 🛰 Satellite 🚀 Rocket 🛸 Flying Saucer 🛎 Bellhop Bell 🚪 Door 🛏 Bed 🛋 Couch and Lamp 🚽 Toilet 🚿 Shower 🛁 Bathtub ⌛ Hourglass ⏳ Hourglass With Flowing Sand ⌚ Watch ⏰ Alarm Clock ⏱ Stopwatch ⏲ Timer Clock 🕰 Mantelpiece Clock 🕛 Twelve O’clock 🕧 Twelve-Thirty 🕐 One O’clock 🕜 One-Thirty 🕑 Two O’clock 🕝 Two-Thirty 🕒 Three O’clock 🕞 Three-Thirty 🕓 Four O’clock 🕟 Four-Thirty 🕔 Five O’clock 🕠 Five-Thirty 🕕 Six O’clock 🕡 Six-Thirty 🕖 Seven O’clock 🕢 
Seven-Thirty 🕗 Eight O’clock 🕣 Eight-Thirty 🕘 Nine O’clock 🕤 Nine-Thirty 🕙 Ten O’clock 🕥 Ten-Thirty 🕚 Eleven O’clock 🕦 Eleven-Thirty 🌑 New Moon 🌒 Waxing Crescent Moon 🌓 First Quarter Moon 🌔 Waxing Gibbous Moon 🌕 Full Moon 🌖 Waning Gibbous Moon 🌗 Last Quarter Moon 🌘 Waning Crescent Moon 🌙 Crescent Moon 🌚 New Moon Face 🌛 First Quarter Moon With Face 🌜 Last Quarter Moon With Face 🌡 Thermometer ☀ Sun 🌝 Full Moon With Face 🌞 Sun With Face ⭐ White Medium Star 🌟 Glowing Star 🌠 Shooting Star ☁ Cloud ⛅ Sun Behind Cloud ⛈ Cloud With Lightning and Rain 🌤 Sun Behind Small Cloud 🌥 Sun Behind Large Cloud 🌦 Sun Behind Rain Cloud 🌧 Cloud With Rain 🌨 Cloud With Snow 🌩 Cloud With Lightning 🌪 Tornado 🌫 Fog 🌬 Wind Face 🌀 Cyclone 🌈 Rainbow 🌂 Closed Umbrella ☂ Umbrella ☔ Umbrella With Rain Drops ⛱ Umbrella on Ground ⚡ High Voltage ❄ Snowflake ☃ Snowman ⛄ Snowman Without Snow ☄ Comet 🔥 Fire 💧 Droplet 🌊 Water Wave 🎃 Jack-O-Lantern 🎄 Christmas Tree 🎆 Fireworks 🎇 Sparkler ✨ Sparkles 🎈 Balloon 🎉 Party Popper 🎊 Confetti Ball 🎋 Tanabata Tree 🎍 Pine Decoration 🎎 Japanese Dolls 🎏 Carp Streamer 🎐 Wind Chime 🎑 Moon Viewing Ceremony 🎀 Ribbon 🎁 Wrapped Gift 🎗 Reminder Ribbon 🎟 Admission Tickets 🎫 Ticket 🎖 Military Medal 🏆 Trophy 🏅 Sports Medal 🥇 1st Place Medal 🥈 2nd Place Medal 🥉 3rd Place Medal ⚽ Soccer Ball ⚾ Baseball 🏀 Basketball 🏐 Volleyball 🏈 American Football 🏉 Rugby Football 🎾 Tennis 🎱 Pool 8 Ball 🎳 Bowling 🏏 Cricket 🏑 Field Hockey 🏒 Ice Hockey 🏓 Ping Pong 🏸 Badminton 🥊 Boxing Glove 🥋 Martial Arts Uniform 🥅 Goal Net 🎯 Direct Hit ⛳ Flag in Hole ⛸ Ice Skate 🎣 Fishing Pole 🎽 Running Shirt 🎿 Skis 🛷 Sled 🥌 Curling Stone 🎮 Video Game 🕹 Joystick 🎲 Game Die ♠ Spade Suit ♥ Heart Suit ♦ Diamond Suit ♣ Club Suit 🃏 Joker 🀄 Mahjong Red Dragon 🎴 Flower Playing Cards 🔇 Muted Speaker 🔈 Speaker Low Volume 🔉 Speaker Medium Volume 🔊 Speaker High Volume 📢 Loudspeaker 📣 Megaphone 📯 Postal Horn 🔔 Bell 🔕 Bell With Slash 🎼 Musical Score 🎵 Musical Note 🎶 Musical Notes 🎙 Studio Microphone 🎚 Level Slider 🎛 Control 
Knobs 🎤 Microphone 🎧 Headphone 📻 Radio 🎷 Saxophone 🎸 Guitar 🎹 Musical Keyboard 🎺 Trumpet 🎻 Violin 🥁 Drum 📱 Mobile Phone 📲 Mobile Phone With Arrow ☎ Telephone 📞 Telephone Receiver 📟 Pager 📠 Fax Machine 🔋 Battery 🔌 Electric Plug 💻 Laptop Computer 🖥 Desktop Computer 🖨 Printer ⌨ Keyboard 🖱 Computer Mouse 🖲 Trackball 💽 Computer Disk 💾 Floppy Disk 💿 Optical Disk 📀 DVD 🎥 Movie Camera 🎞 Film Frames 📽 Film Projector 🎬 Clapper Board 📺 Television 📷 Camera 📸 Camera With Flash 📹 Video Camera 📼 Videocassette 🔍 Left-Pointing Magnifying Glass 🔎 Right-Pointing Magnifying Glass 🔬 Microscope 🔭 Telescope 📡 Satellite Antenna 🕯 Candle 💡 Light Bulb 🔦 Flashlight 🏮 Red Paper Lantern 📔 Notebook With Decorative Cover 📕 Closed Book 📖 Open Book 📗 Green Book 📘 Blue Book 📙 Orange Book 📚 Books 📓 Notebook 📒 Ledger 📃 Page With Curl 📜 Scroll 📄 Page Facing Up 📰 Newspaper 🗞 Rolled-Up Newspaper 📑 Bookmark Tabs 🔖 Bookmark 🏷 Label 💰 Money Bag 💴 Yen Banknote 💵 Dollar Banknote 💶 Euro Banknote 💷 Pound Banknote 💸 Money With Wings 💳 Credit Card 💹 Chart Increasing With Yen 💱 Currency Exchange 💲 Heavy Dollar Sign ✉ Envelope 📧 E-Mail 📨 Incoming Envelope 📩 Envelope With Arrow 📤 Outbox Tray 📥 Inbox Tray 📦 Package 📫 Closed Mailbox With Raised Flag 📪 Closed Mailbox With Lowered Flag 📬 Open Mailbox With Raised Flag 📭 Open Mailbox With Lowered Flag 📮 Postbox 🗳 Ballot Box With Ballot ✏ Pencil ✒ Black Nib 🖋 Fountain Pen 🖊 Pen 🖌 Paintbrush 🖍 Crayon 📝 Memo 💼 Briefcase 📁 File Folder 📂 Open File Folder 🗂 Card Index Dividers 📅 Calendar 📆 Tear-Off Calendar 🗒 Spiral Notepad 🗓 Spiral Calendar 📇 Card Index 📈 Chart Increasing 📉 Chart Decreasing 📊 Bar Chart 📋 Clipboard 📌 Pushpin 📍 Round Pushpin 📎 Paperclip 🖇 Linked Paperclips 📏 Straight Ruler 📐 Triangular Ruler ✂ Scissors 🗃 Card File Box 🗄 File Cabinet 🗑 Wastebasket 🔒 Locked 🔓 Unlocked 🔏 Locked With Pen 🔐 Locked With Key 🔑 Key 🗝 Old Key 🔨 Hammer ⛏ Pick ⚒ Hammer and Pick 🛠 Hammer and Wrench 🗡 Dagger ⚔ Crossed Swords 🔫 Pistol 🏹 Bow and Arrow 🛡 Shield 🔧 Wrench 🔩 Nut and Bolt ⚙ Gear 🗜 
Clamp ⚗ Alembic ⚖ Balance Scale 🔗 Link ⛓ Chains 💉 Syringe 💊 Pill 🚬 Cigarette ⚰ Coffin ⚱ Funeral Urn 🗿 Moai 🛢 Oil Drum 🔮 Crystal Ball 🛒 Shopping Cart 🏧 Atm Sign 🚮 Litter in Bin Sign 🚰 Potable Water ♿ Wheelchair Symbol 🚹 Men’s Room 🚺 Women’s Room 🚻 Restroom 🚼 Baby Symbol 🚾 Water Closet 🛂 Passport Control 🛃 Customs 🛄 Baggage Claim 🛅 Left Luggage ⚠ Warning 🚸 Children Crossing ⛔ No Entry 🚫 Prohibited 🚳 No Bicycles 🚭 No Smoking 🚯 No Littering 🚱 Non-Potable Water 🚷 No Pedestrians 📵 No Mobile Phones 🔞 No One Under Eighteen ☢ Radioactive ☣ Biohazard ⬆ Up Arrow ↗ Up-Right Arrow ➡ Right Arrow ↘ Down-Right Arrow ⬇ Down Arrow ↙ Down-Left Arrow ⬅ Left Arrow ↖ Up-Left Arrow ↕ Up-Down Arrow ↔ Left-Right Arrow ↩ Right Arrow Curving Left ↪ Left Arrow Curving Right ⤴ Right Arrow Curving Up ⤵ Right Arrow Curving Down 🔃 Clockwise Vertical Arrows 🔄 Anticlockwise Arrows Button 🔙 Back Arrow 🔚 End Arrow 🔛 On! Arrow 🔜 Soon Arrow 🔝 Top Arrow 🛐 Place of Worship ⚛ Atom Symbol 🕉 Om ✡ Star of David ☸ Wheel of Dharma ☯ Yin Yang ✝ Latin Cross ☦ Orthodox Cross ☪ Star and Crescent ☮ Peace Symbol 🕎 Menorah 🔯 Dotted Six-Pointed Star ♈ Aries ♉ Taurus ♊ Gemini ♋ Cancer ♌ Leo ♍ Virgo ♎ Libra ♏ Scorpius ♐ Sagittarius ♑ Capricorn ♒ Aquarius ♓ Pisces ⛎ Ophiuchus 🔀 Shuffle Tracks Button 🔁 Repeat Button 🔂 Repeat Single Button ▶ Play Button ⏩ Fast-Forward Button ⏭ Next Track Button ⏯ Play or Pause Button ◀ Reverse Button ⏪ Fast Reverse Button ⏮ Last Track Button 🔼 Up Button ⏫ Fast Up Button 🔽 Down Button ⏬ Fast Down Button ⏸ Pause Button ⏹ Stop Button ⏺ Record Button ⏏ Eject Button 🎦 Cinema 🔅 Dim Button 🔆 Bright Button 📶 Antenna Bars 📳 Vibration Mode 📴 Mobile Phone Off ♀ Female Sign ♂ Male Sign ⚕ Medical Symbol ♻ Recycling Symbol ⚜ Fleur-De-Lis 🔱 Trident Emblem 📛 Name Badge 🔰 Japanese Symbol for Beginner ⭕ Heavy Large Circle ✅ White Heavy Check Mark ☑ Ballot Box With Check ✔ Heavy Check Mark ✖ Heavy Multiplication X ❌ Cross Mark ❎ Cross Mark Button ➕ Heavy Plus Sign ➖ Heavy Minus Sign ➗ Heavy Division Sign ➰ 
Curly Loop ➿ Double Curly Loop 〽 Part Alternation Mark ✳ Eight-Spoked Asterisk ✴ Eight-Pointed Star ❇ Sparkle ‼ Double Exclamation Mark ⁉ Exclamation Question Mark ❓ Question Mark ❔ White Question Mark ❕ White Exclamation Mark ❗ Exclamation Mark 〰 Wavy Dash © Copyright ® Registered ™ Trade Mark #️⃣ Keycap Number Sign *️⃣ Keycap Asterisk 0️⃣ Keycap Digit Zero 1️⃣ Keycap Digit One 2️⃣ Keycap Digit Two 3️⃣ Keycap Digit Three 4️⃣ Keycap Digit Four 5️⃣ Keycap Digit Five 6️⃣ Keycap Digit Six 7️⃣ Keycap Digit Seven 8️⃣ Keycap Digit Eight 9️⃣ Keycap Digit Nine 🔟 Keycap 10 💯 Hundred Points 🔠 Input Latin Uppercase 🔡 Input Latin Lowercase 🔢 Input Numbers 🔣 Input Symbols 🔤 Input Latin Letters 🅰 A Button (blood Type) 🆎 Ab Button (blood Type) 🅱 B Button (blood Type) 🆑 CL Button 🆒 Cool Button 🆓 Free Button ℹ Information 🆔 ID Button Ⓜ Circled M 🆕 New Button 🆖 NG Button 🅾 O Button (blood Type) 🆗 OK Button 🅿 P Button 🆘 SOS Button 🆙 Up! Button 🆚 Vs Button 🈁 Japanese “here” Button 🈂 Japanese “service Charge” Button 🈷 Japanese “monthly Amount” Button 🈶 Japanese “not Free of Charge” Button 🈯 Japanese “reserved” Button 🉐 Japanese “bargain” Button 🈹 Japanese “discount” Button 🈚 Japanese “free of Charge” Button 🈲 Japanese “prohibited” Button 🉑 Japanese “acceptable” Button 🈸 Japanese “application” Button 🈴 Japanese “passing Grade” Button 🈳 Japanese “vacancy” Button ㊗ Japanese “congratulations” Button ㊙ Japanese “secret” Button 🈺 Japanese “open for Business” Button 🈵 Japanese “no Vacancy” Button ▪ Black Small Square ▫ White Small Square ◻ White Medium Square ◼ Black Medium Square ◽ White Medium-Small Square ◾ Black Medium-Small Square ⬛ Black Large Square ⬜ White Large Square 🔶 Large Orange Diamond 🔷 Large Blue Diamond 🔸 Small Orange Diamond 🔹 Small Blue Diamond 🔺 Red Triangle Pointed Up 🔻 Red Triangle Pointed Down 💠 Diamond With a Dot 🔘 Radio Button 🔲 Black Square Button 🔳 White Square Button ⚪ White Circle ⚫ Black Circle 🔴 Red Circle 🔵 Blue Circle 🏁 Chequered Flag 🚩 Triangular Flag 🎌 
Crossed Flags 🏴 Black Flag 🏳 White Flag 🏳️‍🌈 Rainbow Flag 🇦🇨 Ascension Island 🇦🇩 Andorra 🇦🇪 United Arab Emirates 🇦🇫 Afghanistan 🇦🇬 Antigua & Barbuda 🇦🇮 Anguilla 🇦🇱 Albania 🇦🇲 Armenia 🇦🇴 Angola 🇦🇶 Antarctica 🇦🇷 Argentina 🇦🇸 American Samoa 🇦🇹 Austria 🇦🇺 Australia 🇦🇼 Aruba 🇦🇽 Åland Islands 🇦🇿 Azerbaijan 🇧🇦 Bosnia & Herzegovina 🇧🇧 Barbados 🇧🇩 Bangladesh 🇧🇪 Belgium 🇧🇫 Burkina Faso 🇧🇬 Bulgaria 🇧🇭 Bahrain 🇧🇮 Burundi 🇧🇯 Benin 🇧🇱 St. Barthélemy 🇧🇲 Bermuda 🇧🇳 Brunei 🇧🇴 Bolivia 🇧🇶 Caribbean Netherlands 🇧🇷 Brazil 🇧🇸 Bahamas 🇧🇹 Bhutan 🇧🇻 Bouvet Island 🇧🇼 Botswana 🇧🇾 Belarus 🇧🇿 Belize 🇨🇦 Canada 🇨🇨 Cocos (Keeling) Islands 🇨🇩 Congo - Kinshasa 🇨🇫 Central African Republic 🇨🇬 Congo - Brazzaville 🇨🇭 Switzerland 🇨🇮 Côte D’Ivoire 🇨🇰 Cook Islands 🇨🇱 Chile 🇨🇲 Cameroon 🇨🇳 China 🇨🇴 Colombia 🇨🇵 Clipperton Island 🇨🇷 Costa Rica 🇨🇺 Cuba 🇨🇻 Cape Verde 🇨🇼 Curaçao 🇨🇽 Christmas Island 🇨🇾 Cyprus 🇨🇿 Czechia 🇩🇪 Germany 🇩🇬 Diego Garcia 🇩🇯 Djibouti 🇩🇰 Denmark 🇩🇲 Dominica 🇩🇴 Dominican Republic 🇩🇿 Algeria 🇪🇦 Ceuta & Melilla 🇪🇨 Ecuador 🇪🇪 Estonia 🇪🇬 Egypt 🇪🇭 Western Sahara 🇪🇷 Eritrea 🇪🇸 Spain 🇪🇹 Ethiopia 🇪🇺 European Union 🇫🇮 Finland 🇫🇯 Fiji 🇫🇰 Falkland Islands 🇫🇲 Micronesia 🇫🇴 Faroe Islands 🇫🇷 France 🇬🇦 Gabon 🇬🇧 United Kingdom 🇬🇩 Grenada 🇬🇪 Georgia 🇬🇫 French Guiana 🇬🇬 Guernsey 🇬🇭 Ghana 🇬🇮 Gibraltar 🇬🇱 Greenland 🇬🇲 Gambia 🇬🇳 Guinea 🇬🇵 Guadeloupe 🇬🇶 Equatorial Guinea 🇬🇷 Greece 🇬🇸 South Georgia & South Sandwich Islands 🇬🇹 Guatemala 🇬🇺 Guam 🇬🇼 Guinea-Bissau 🇬🇾 Guyana 🇭🇰 Hong Kong Sar China 🇭🇲 Heard & Mcdonald Islands 🇭🇳 Honduras 🇭🇷 Croatia 🇭🇹 Haiti 🇭🇺 Hungary 🇮🇨 Canary Islands 🇮🇩 Indonesia 🇮🇪 Ireland 🇮🇱 Israel 🇮🇲 Isle of Man 🇮🇳 India 🇮🇴 British Indian Ocean Territory 🇮🇶 Iraq 🇮🇷 Iran 🇮🇸 Iceland 🇮🇹 Italy 🇯🇪 Jersey 🇯🇲 Jamaica 🇯🇴 Jordan 🇯🇵 Japan 🇰🇪 Kenya 🇰🇬 Kyrgyzstan 🇰🇭 Cambodia 🇰🇮 Kiribati 🇰🇲 Comoros 🇰🇳 St. Kitts & Nevis 🇰🇵 North Korea 🇰🇷 South Korea 🇰🇼 Kuwait 🇰🇾 Cayman Islands 🇰🇿 Kazakhstan 🇱🇦 Laos 🇱🇧 Lebanon 🇱🇨 St. 
Lucia 🇱🇮 Liechtenstein 🇱🇰 Sri Lanka 🇱🇷 Liberia 🇱🇸 Lesotho 🇱🇹 Lithuania 🇱🇺 Luxembourg 🇱🇻 Latvia 🇱🇾 Libya 🇲🇦 Morocco 🇲🇨 Monaco 🇲🇩 Moldova 🇲🇪 Montenegro 🇲🇫 St. Martin 🇲🇬 Madagascar 🇲🇭 Marshall Islands 🇲🇰 Macedonia 🇲🇱 Mali 🇲🇲 Myanmar (Burma) 🇲🇳 Mongolia 🇲🇴 Macau Sar China 🇲🇵 Northern Mariana Islands 🇲🇶 Martinique 🇲🇷 Mauritania 🇲🇸 Montserrat 🇲🇹 Malta 🇲🇺 Mauritius 🇲🇻 Maldives 🇲🇼 Malawi 🇲🇽 Mexico 🇲🇾 Malaysia 🇲🇿 Mozambique 🇳🇦 Namibia 🇳🇨 New Caledonia 🇳🇪 Niger 🇳🇫 Norfolk Island 🇳🇬 Nigeria 🇳🇮 Nicaragua 🇳🇱 Netherlands 🇳🇴 Norway 🇳🇵 Nepal 🇳🇷 Nauru 🇳🇺 Niue 🇳🇿 New Zealand 🇴🇲 Oman 🇵🇦 Panama 🇵🇪 Peru 🇵🇫 French Polynesia 🇵🇬 Papua New Guinea 🇵🇭 Philippines 🇵🇰 Pakistan 🇵🇱 Poland 🇵🇲 St. Pierre & Miquelon 🇵🇳 Pitcairn Islands 🇵🇷 Puerto Rico 🇵🇸 Palestinian Territories 🇵🇹 Portugal 🇵🇼 Palau 🇵🇾 Paraguay 🇶🇦 Qatar 🇷🇪 Réunion 🇷🇴 Romania 🇷🇸 Serbia 🇷🇺 Russia 🇷🇼 Rwanda 🇸🇦 Saudi Arabia 🇸🇧 Solomon Islands 🇸🇨 Seychelles 🇸🇩 Sudan 🇸🇪 Sweden 🇸🇬 Singapore 🇸🇭 St. Helena 🇸🇮 Slovenia 🇸🇯 Svalbard & Jan Mayen 🇸🇰 Slovakia 🇸🇱 Sierra Leone 🇸🇲 San Marino 🇸🇳 Senegal 🇸🇴 Somalia 🇸🇷 Suriname 🇸🇸 South Sudan 🇸🇹 São Tomé & Príncipe 🇸🇻 El Salvador 🇸🇽 Sint Maarten 🇸🇾 Syria 🇸🇿 Swaziland 🇹🇦 Tristan Da Cunha 🇹🇨 Turks & Caicos Islands 🇹🇩 Chad 🇹🇫 French Southern Territories 🇹🇬 Togo 🇹🇭 Thailand 🇹🇯 Tajikistan 🇹🇰 Tokelau 🇹🇱 Timor-Leste 🇹🇲 Turkmenistan 🇹🇳 Tunisia 🇹🇴 Tonga 🇹🇷 Turkey 🇹🇹 Trinidad & Tobago 🇹🇻 Tuvalu 🇹🇼 Taiwan 🇹🇿 Tanzania 🇺🇦 Ukraine 🇺🇬 Uganda 🇺🇲 U.S. Outlying Islands 🇺🇳 United Nations 🇺🇸 United States 🇺🇾 Uruguay 🇺🇿 Uzbekistan 🇻🇦 Vatican City 🇻🇨 St. Vincent & Grenadines 🇻🇪 Venezuela 🇻🇬 British Virgin Islands 🇻🇮 U.S. 
Virgin Islands 🇻🇳 Vietnam 🇻🇺 Vanuatu 🇼🇫 Wallis & Futuna 🇼🇸 Samoa 🇽🇰 Kosovo 🇾🇪 Yemen 🇾🇹 Mayotte 🇿🇦 South Africa 🇿🇲 Zambia 🇿🇼 Zimbabwe 🏴󠁧󠁢󠁥󠁮󠁧󠁿 Flag for England (GB-ENG) 🏴󠁧󠁢󠁳󠁣󠁴󠁿 Flag for Scotland (GB-SCT) 🏴󠁧󠁢󠁷󠁬󠁳󠁿 Flag for Wales (GB-WLS) 🥆 Rifle 🤻 Modern Pentathlon 🏴‍☠️ Pirate Flag 🇦 Regional Indicator Symbol Letter A 🇧 Regional Indicator Symbol Letter B 🇨 Regional Indicator Symbol Letter C 🇩 Regional Indicator Symbol Letter D 🇪 Regional Indicator Symbol Letter E 🇫 Regional Indicator Symbol Letter F 🇬 Regional Indicator Symbol Letter G 🇭 Regional Indicator Symbol Letter H 🇮 Regional Indicator Symbol Letter I 🇯 Regional Indicator Symbol Letter J 🇰 Regional Indicator Symbol Letter K 🇱 Regional Indicator Symbol Letter L 🇲 Regional Indicator Symbol Letter M 🇳 Regional Indicator Symbol Letter N 🇴 Regional Indicator Symbol Letter O 🇵 Regional Indicator Symbol Letter P 🇶 Regional Indicator Symbol Letter Q 🇷 Regional Indicator Symbol Letter R 🇸 Regional Indicator Symbol Letter S 🇹 Regional Indicator Symbol Letter T 🇺 Regional Indicator Symbol Letter U 🇻 Regional Indicator Symbol Letter V 🇼 Regional Indicator Symbol Letter W 🇽 Regional Indicator Symbol Letter X 🇾 Regional Indicator Symbol Letter Y 🇿 Regional Indicator Symbol Letter Z 🐱‍🐉 Dino Cat 🐱‍🚀 Astro Cat 🐱‍👤 Ninja Cat 🐱‍💻 Hacker Cat 🐱‍🏍 Stunt Cat 🐱‍👓 Hipster Cat ◯‍◯‍◯‍◯‍◯ Olympic Rings 🏴󠁮󠁲󠀰󠀵󠁿 Flag for Baiti (NR-05) 🏴󠁮󠁯󠀱󠀷󠁿 Flag for Nord-Trøndelag (NO-17) 🏴󠁮󠁯󠀱󠀲󠁿 Flag for Hordaland (NO-12) 🏴󠁮󠁯󠀰󠀲󠁿 Flag for Akershus (NO-02) 🏴󠁮󠁯󠀱󠀶󠁿 Flag for Sør-Trøndelag (NO-16) 🏴󠁮󠁯󠀰󠀸󠁿 Flag for Telemark (NO-08) 🏴󠁮󠁬󠁵󠁴󠁿 Flag for Utrecht (NL-UT) 🏴󠁮󠁯󠀱󠀵󠁿 Flag for Møre og Romsdal (NO-15) 🏴󠁮󠁯󠀲󠀱󠁿 Flag for Svalbard (NO-21) 🏴󠁮󠁰󠀴󠁿 Flag for Purwanchal (NP-4) 🏴󠁮󠁰󠀱󠁿 Flag for Central (NP-1) 🏴󠁮󠁯󠀰󠀳󠁿 Flag for Oslo (NO-03) 🏴󠁮󠁲󠀰󠀶󠁿 Flag for Boe (NR-06) 👨🏾‍👨🏾‍👦🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁮󠁬󠁮󠁢󠁿 Flag for North Brabant (NL-NB) 🏴󠁮󠁯󠀰󠀹󠁿 Flag for Aust-Agder (NO-09) 🏴󠁮󠁲󠀰󠀲󠁿 
Flag for Anabar (NR-02) 🏴󠁮󠁬󠁬󠁩󠁿 Flag for Limburg (NL-LI) 🏴󠁮󠁯󠀰󠀶󠁿 Flag for Buskerud (NO-06) 🏴󠁮󠁯󠀰󠀴󠁿 Flag for Hedmark (NO-04) 🏴󠁮󠁯󠀰󠀷󠁿 Flag for Vestfold (NO-07) 🏴󠁮󠁲󠀰󠀴󠁿 Flag for Anibare (NR-04) 🏴󠁮󠁯󠀲󠀰󠁿 Flag for Finnmark (NO-20) 🏴󠁮󠁬󠁯󠁶󠁿 Flag for Overijssel (NL-OV) 🏴󠁮󠁯󠀱󠀱󠁿 Flag for Rogaland (NO-11) 🏴󠁮󠁯󠀰󠀱󠁿 Flag for Østfold (NO-01) 🏴󠁮󠁲󠀰󠀱󠁿 Flag for Aiwo (NR-01) 🏴󠁮󠁬󠁺󠁥󠁿 Flag for Zeeland (NL-ZE) 🏴󠁮󠁲󠀰󠀷󠁿 Flag for Buada (NR-07) 🏴󠁮󠁯󠀱󠀹󠁿 Flag for Troms (NO-19) 🏴󠁮󠁯󠀰󠀵󠁿 Flag for Oppland (NO-05) 🏴󠁮󠁰󠀲󠁿 Flag for Madhya Pashchimanchal (NP-2) 🏴󠁮󠁲󠀰󠀳󠁿 Flag for Anetan (NR-03) 🏴󠁮󠁰󠀳󠁿 Flag for Western (NP-3) 🏴󠁮󠁯󠀲󠀲󠁿 Flag for Jan Mayen (NO-22) 🏴󠁮󠁯󠀱󠀸󠁿 Flag for Nordland (NO-18) 🏴󠁰󠁡󠀱󠁿 Flag for Bocas del Toro (PA-1) 🏴󠁰󠁡󠀳󠁿 Flag for Colón (PA-3) 🏴󠁯󠁭󠁤󠁡󠁿 Flag for Ad Dakhiliyah (OM-DA) 🏴󠁯󠁭󠁭󠁡󠁿 Flag for Muscat (OM-MA) 🏴󠁮󠁲󠀰󠀹󠁿 Flag for Ewa (NR-09) 🏴󠁮󠁺󠁴󠁫󠁩󠁿 Flag for Taranaki (NZ-TKI) 🏴󠁮󠁲󠀱󠀰󠁿 Flag for Ijuw (NR-10) 🏴󠁮󠁺󠁷󠁴󠁣󠁿 Flag for West Coast (NZ-WTC) 🏴󠁮󠁺󠁳󠁴󠁬󠁿 Flag for Southland (NZ-STL) 🏴󠁮󠁺󠁴󠁡󠁳󠁿 Flag for Tasman (NZ-TAS) 🏴󠁮󠁺󠁭󠁷󠁴󠁿 Flag for Manawatu-Wanganui (NZ-MWT) 🏴󠁮󠁺󠁷󠁫󠁯󠁿 Flag for Waikato (NZ-WKO) 🏴󠁮󠁺󠁭󠁢󠁨󠁿 Flag for Marl (NZ-MBH) 🏴󠁮󠁺󠁢󠁯󠁰󠁿 Flag for Bay of Plenty (NZ-BOP) 🏴󠁮󠁲󠀱󠀲󠁿 Flag for Nibok (NR-12) 🏴󠁯󠁭󠁢󠁵󠁿 Flag for Al Buraimi (OM-BU) 🏴󠁮󠁺󠁡󠁵󠁫󠁿 Flag for Auckland (NZ-AUK) 🏴󠁯󠁭󠁳󠁪󠁿 Flag for Janub ash Sharqiyah (OM-SJ) 🏴󠁯󠁭󠁳󠁳󠁿 Flag for Shamal ash Sharqiyah (OM-SS) 🏴󠁰󠁡󠀲󠁿 Flag for Coclé (PA-2) 🏴󠁮󠁲󠀱󠀱󠁿 Flag for Meneng (NR-11) 🏴󠁰󠁡󠀱󠀰󠁿 Flag for West Panamá (PA-10) 🏴󠁯󠁭󠁺󠁡󠁿 Flag for Ad Dhahirah (OM-ZA) 🏴󠁮󠁺󠁮󠁴󠁬󠁿 Flag for Northland (NZ-NTL) 🏴󠁮󠁺󠁣󠁡󠁮󠁿 Flag for Canterbury (NZ-CAN) 🏴󠁮󠁺󠁧󠁩󠁳󠁿 Flag for Gisborne (NZ-GIS) 🏴󠁮󠁺󠁣󠁩󠁴󠁿 Flag for Chatham Islands (NZ-CIT) 🏴󠁮󠁲󠀱󠀳󠁿 Flag for Uaboe (NR-13) 🏴󠁮󠁲󠀰󠀸󠁿 Flag for Denigomodu (NR-08) 🏴󠁯󠁭󠁭󠁵󠁿 Flag for Musandam (OM-MU) 🏴󠁯󠁭󠁢󠁳󠁿 Flag for Shamal al Batinah (OM-BS) 🏴󠁮󠁺󠁨󠁫󠁢󠁿 Flag for Hawke’s Bay (NZ-HKB) 🏴󠁮󠁺󠁯󠁴󠁡󠁿 Flag for Otago (NZ-OTA) 🏴󠁯󠁭󠁢󠁪󠁿 Flag for Janub al Batinah (OM-BJ) 🏴󠁯󠁭󠁺󠁵󠁿 Flag for Dhofar (OM-ZU) 🏴󠁰󠁡󠀵󠁿 Flag for Darién (PA-5) 🏴󠁰󠁥󠁣󠁡󠁬󠁿 Flag for El Callao (PE-CAL) 🏴󠁰󠁡󠀶󠁿 Flag for Herrera (PA-6) 🏴󠁰󠁡󠁫󠁹󠁿 Flag for Guna Yala 
(PA-KY) 🏴󠁰󠁡󠁥󠁭󠁿 Flag for Emberá (PA-EM) 🏴󠁰󠁥󠁬󠁡󠁬󠁿 Flag for La Libertad (PE-LAL) 🏴󠁰󠁡󠀹󠁿 Flag for Veraguas (PA-9) 🏴󠁰󠁥󠁬󠁯󠁲󠁿 Flag for Loreto (PE-LOR) 🏴󠁰󠁥󠁡󠁭󠁡󠁿 Flag for Amazonas (PE-AMA) 🏴󠁰󠁡󠀴󠁿 Flag for Chiriquí (PA-4) 🏴󠁰󠁧󠁣󠁰󠁫󠁿 Flag for Chimbu (PG-CPK) 🏴󠁰󠁧󠁥󠁨󠁧󠁿 Flag for Eastern Highlands (PG-EHG) 🏴󠁰󠁥󠁳󠁡󠁭󠁿 Flag for San Martín (PE-SAM) 🏴󠁰󠁥󠁪󠁵󠁮󠁿 Flag for Junín (PE-JUN) 🏴󠁰󠁥󠁨󠁵󠁣󠁿 Flag for Huánuco (PE-HUC) 🏴󠁰󠁥󠁰󠁡󠁳󠁿 Flag for Pasco (PE-PAS) 🏴󠁰󠁡󠁮󠁢󠁿 Flag for Ngöbe-Buglé (PA-NB) 🏴󠁰󠁥󠁣󠁡󠁪󠁿 Flag for Cajamarca (PE-CAJ) 🏴󠁰󠁥󠁩󠁣󠁡󠁿 Flag for Ica (PE-ICA) 🏴󠁰󠁥󠁬󠁩󠁭󠁿 Flag for Lima Region (PE-LIM) 🏴󠁰󠁥󠁭󠁯󠁱󠁿 Flag for Moquegua (PE-MOQ) 🏴󠁰󠁥󠁰󠁵󠁮󠁿 Flag for Puno (PE-PUN) 🏴󠁰󠁥󠁵󠁣󠁡󠁿 Flag for Ucayali (PE-UCA) 🏴󠁰󠁥󠁬󠁭󠁡󠁿 Flag for Lima (PE-LMA) 🏴󠁰󠁥󠁰󠁩󠁵󠁿 Flag for Piura (PE-PIU) 🏴󠁰󠁥󠁴󠁵󠁭󠁿 Flag for Tumbes (PE-TUM) 🏴󠁰󠁥󠁣󠁵󠁳󠁿 Flag for Cusco (PE-CUS) 🏴󠁰󠁡󠀸󠁿 Flag for Panamá (PA-8) 🏴󠁰󠁥󠁴󠁡󠁣󠁿 Flag for Tacna (PE-TAC) 🏴󠁰󠁧󠁣󠁰󠁭󠁿 Flag for Central (PG-CPM) 🏴󠁰󠁡󠀷󠁿 Flag for Los Santos (PA-7) 🏴󠁰󠁥󠁬󠁡󠁭󠁿 Flag for Lambayeque (PE-LAM) 🏴󠁰󠁥󠁨󠁵󠁶󠁿 Flag for Huancavelica (PE-HUV) 🏴󠁰󠁥󠁡󠁮󠁣󠁿 Flag for Ancash (PE-ANC) 🏴󠁰󠁧󠁨󠁬󠁡󠁿 Flag for Hela (PG-HLA) 🏴󠁰󠁧󠁮󠁣󠁤󠁿 Flag for Port Moresby (PG-NCD) 🏴󠁰󠁫󠁩󠁳󠁿 Flag for Islamabad (PK-IS) 🏴󠁰󠁨󠀰󠀰󠁿 Flag for Metro Manila (PH-00) 🏴󠁰󠁨󠀰󠀵󠁿 Flag for Bicol (PH-05) 🏴󠁰󠁧󠁧󠁰󠁫󠁿 Flag for Gulf (PG-GPK) 🏴󠁰󠁨󠀰󠀹󠁿 Flag for Zamboanga Peninsula (PH-09) 🏴󠁰󠁧󠁮󠁳󠁢󠁿 Flag for Bougainville (PG-NSB) 🏴󠁰󠁫󠁧󠁢󠁿 Flag for Gilgit-Baltistan (PK-GB) 🏴󠁰󠁧󠁭󠁰󠁭󠁿 Flag for Madang (PG-MPM) 🏴󠁦󠁪󠁷󠁿 Flag for Western (FJ-W) 🏴󠁰󠁨󠀱󠀲󠁿 Flag for Soccsksargen (PH-12) 🏴󠁰󠁨󠀰󠀸󠁿 Flag for Eastern Visayas (PH-08) 🏴󠁰󠁧󠁥󠁰󠁷󠁿 Flag for Enga (PG-EPW) 🏴󠁰󠁧󠁭󠁢󠁡󠁿 Flag for Milne Bay (PG-MBA) 🏴󠁰󠁨󠀴󠀰󠁿 Flag for Calabarzon (PH-40) 🏴󠁰󠁧󠁪󠁷󠁫󠁿 Flag for Jiwaka (PG-JWK) 🏴󠁰󠁨󠀰󠀲󠁿 Flag for Cagayan Valley (PH-02) 👨🏿‍👨🏿‍👦🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁰󠁧󠁭󠁰󠁬󠁿 Flag for Morobe (PG-MPL) 🏴󠁰󠁨󠀱󠀰󠁿 Flag for Northern Mindanao (PH-10) 🏴󠁰󠁨󠀴󠀱󠁿 Flag for Mimaropa (PH-41) 🏴󠁰󠁫󠁢󠁡󠁿 Flag for Balochistan (PK-BA) 🏴󠁰󠁨󠀱󠀳󠁿 Flag for Caraga (PH-13) 🏴󠁰󠁧󠁥󠁳󠁷󠁿 Flag for East Sepik (PG-ESW) 🏴󠁰󠁨󠀰󠀶󠁿 Flag for 
Western Visayas (PH-06) 🏴󠁰󠁨󠀰󠀳󠁿 Flag for Central Luzon (PH-03) 🏴󠁰󠁨󠀱󠀴󠁿 Flag for Muslim Mindanao (PH-14) 🏴󠁰󠁧󠁳󠁨󠁭󠁿 Flag for Southern Highlands (PG-SHM) 🏴󠁰󠁧󠁷󠁰󠁤󠁿 Flag for Western (PG-WPD) 🏴󠁰󠁧󠁳󠁡󠁮󠁿 Flag for Sandaun (PG-SAN) 🏴󠁰󠁧󠁮󠁩󠁫󠁿 Flag for New Ireland (PG-NIK) 🏴󠁰󠁧󠁮󠁰󠁰󠁿 Flag for Oro (PG-NPP) 🏴󠁰󠁧󠁭󠁲󠁬󠁿 Flag for Manus (PG-MRL) 🏴󠁰󠁧󠁷󠁨󠁭󠁿 Flag for Western Highlands (PG-WHM) 🏴󠁰󠁨󠀱󠀱󠁿 Flag for Davao (PH-11) 🏴󠁰󠁫󠁰󠁢󠁿 Flag for Punjab (PK-PB) 🏴󠁰󠁬󠁰󠁭󠁿 Flag for Federal Capital Territory (PL-PM) 🏴󠁰󠁬󠁳󠁬󠁿 Flag for Silesia (PL-SL) 🏴󠁰󠁬󠁫󠁰󠁿 Flag for Kuyavian-Pomerania (PL-KP) 🏴󠁰󠁳󠁴󠁢󠁳󠁿 Flag for Tubas (PS-TBS) 🏴󠁰󠁳󠁲󠁢󠁨󠁿 Flag for Ramallah and al-Bireh (PS-RBH) 🏴󠁰󠁳󠁧󠁺󠁡󠁿 Flag for Gaza (PS-GZA) 🏴󠁰󠁳󠁲󠁦󠁨󠁿 Flag for Rafah (PS-RFH) 🏴󠁰󠁳󠁨󠁢󠁮󠁿 Flag for Hebron (PS-HBN) 🏴󠁰󠁬󠁰󠁤󠁿 Flag for Podlaskie (PL-PD) 🏴󠁰󠁬󠁰󠁫󠁿 Flag for Subcarpathia (PL-PK) 🏴󠁰󠁳󠁪󠁥󠁮󠁿 Flag for Jenin (PS-JEN) 🏴󠁰󠁬󠁤󠁳󠁿 Flag for Lower Silesian (PL-DS) 🏴󠁰󠁳󠁫󠁹󠁳󠁿 Flag for Khan Yunis (PS-KYS) 🏴󠁰󠁬󠁬󠁤󠁿 Flag for Łódź (PL-LD) 🏴󠁰󠁳󠁮󠁧󠁺󠁿 Flag for North Gaza (PS-NGZ) 🏴󠁰󠁬󠁺󠁰󠁿 Flag for West Pomerania (PL-ZP) 🏴󠁰󠁫󠁪󠁫󠁿 Flag for Azad Kashmir (PK-JK) 🏴󠁰󠁳󠁳󠁬󠁴󠁿 Flag for Salfit (PS-SLT) 🏴󠁰󠁬󠁭󠁺󠁿 Flag for Mazovia (PL-MZ) 🏴󠁰󠁬󠁭󠁡󠁿 Flag for Lesser Poland (PL-MA) 🏴󠁰󠁳󠁱󠁱󠁡󠁿 Flag for Qalqilya (PS-QQA) 🏴󠁰󠁴󠀰󠀱󠁿 Flag for Aveiro (PT-01) 🏴󠁰󠁬󠁷󠁰󠁿 Flag for Greater Poland (PL-WP) 🏴󠁰󠁬󠁯󠁰󠁿 Flag for Opole (PL-OP) 🏴󠁰󠁳󠁢󠁴󠁨󠁿 Flag for Bethlehem (PS-BTH) 🏴󠁰󠁫󠁫󠁰󠁿 Flag for Khyber Pakhtunkhwa (PK-KP) 🏴󠁰󠁳󠁴󠁫󠁭󠁿 Flag for Tulkarm (PS-TKM) 🏴󠁰󠁳󠁮󠁢󠁳󠁿 Flag for Nablus (PS-NBS) 🏴󠁰󠁬󠁷󠁮󠁿 Flag for Warmian-Masuria (PL-WN) 🏴󠁰󠁳󠁪󠁲󠁨󠁿 Flag for Jericho (PS-JRH) 🏴󠁰󠁫󠁳󠁤󠁿 Flag for Sindh (PK-SD) 🏴󠁰󠁬󠁬󠁵󠁿 Flag for Lublin (PL-LU) 🏴󠁰󠁳󠁪󠁥󠁭󠁿 Flag for Jerusalem (PS-JEM) 🏴󠁰󠁬󠁬󠁢󠁿 Flag for Lubusz (PL-LB) 🏴󠁰󠁬󠁳󠁫󠁿 Flag for Świętokrzyskie (PL-SK) 🏴󠁰󠁷󠀲󠀱󠀲󠁿 Flag for Melekeok (PW-212) 🏴󠁰󠁴󠀰󠀸󠁿 Flag for Faro (PT-08) 🏴󠁰󠁹󠀱󠀱󠁿 Flag for Central (PY-11) 🏴󠁰󠁴󠀰󠀷󠁿 Flag for Évora (PT-07) 🏴󠁰󠁷󠀲󠀲󠀸󠁿 Flag for Ngiwal (PW-228) 🏴󠁰󠁹󠀱󠀲󠁿 Flag for Ñeembucú (PY-12) 🏴󠁰󠁴󠀱󠀶󠁿 Flag for Viana do Castelo (PT-16) 🏴󠁰󠁴󠀱󠀱󠁿 Flag for Lisbon (PT-11) 🏴󠁰󠁹󠀱󠀵󠁿 Flag for Presidente Hayes (PY-15) 🏴󠁰󠁴󠀱󠀷󠁿 Flag for Vila Real 
(PT-17) 🏴󠁰󠁴󠀱󠀸󠁿 Flag for Viseu (PT-18) 🏴󠁰󠁷󠀰󠀰󠀴󠁿 Flag for Airai (PW-004) 🏴󠁰󠁹󠀱󠀳󠁿 Flag for Amambay (PY-13) 🏴󠁰󠁷󠀲󠀲󠀴󠁿 Flag for Ngatpang (PW-224) 🏴󠁰󠁴󠀰󠀶󠁿 Flag for Coimbra (PT-06) 🏴󠁰󠁴󠀱󠀲󠁿 Flag for Portalegre (PT-12) 🏴󠁰󠁷󠀳󠀵󠀰󠁿 Flag for Peleliu (PW-350) 🏴󠁰󠁷󠀲󠀲󠀲󠁿 Flag for Ngardmau (PW-222) 🏴󠁰󠁷󠀲󠀱󠀴󠁿 Flag for Ngaraard (PW-214) 🏴󠁰󠁹󠀱󠀴󠁿 Flag for Canindeyú (PY-14) 🏴󠁰󠁷󠀰󠀱󠀰󠁿 Flag for Angaur (PW-010) 🏴󠁰󠁷󠀳󠀷󠀰󠁿 Flag for Sonsorol (PW-370) 🏴󠁰󠁴󠀰󠀴󠁿 Flag for Bragança (PT-04) 🏴󠁰󠁴󠀰󠀵󠁿 Flag for Castelo Branco (PT-05) 🏴󠁰󠁴󠀱󠀴󠁿 Flag for Santarém (PT-14) 🏴󠁰󠁴󠀰󠀳󠁿 Flag for Braga (PT-03) 🏴󠁰󠁷󠀰󠀵󠀰󠁿 Flag for Hatohobei (PW-050) 🏴󠁰󠁷󠀱󠀵󠀰󠁿 Flag for Koror (PW-150) 🏴󠁰󠁹󠀱󠀰󠁿 Flag for Alto Paraná (PY-10) 🏴󠁰󠁷󠀲󠀲󠀷󠁿 Flag for Ngeremlengui (PW-227) 🏴󠁰󠁴󠀱󠀰󠁿 Flag for Leiria (PT-10) 🏴󠁰󠁴󠀱󠀳󠁿 Flag for Porto (PT-13) 🏴󠁰󠁴󠀱󠀵󠁿 Flag for Setúbal (PT-15) 🏴󠁰󠁷󠀰󠀰󠀲󠁿 Flag for Aimeliik (PW-002) 🏴󠁰󠁷󠀲󠀲󠀶󠁿 Flag for Ngchesar (PW-226) 🏴󠁰󠁴󠀰󠀹󠁿 Flag for Guarda (PT-09) 🏴󠁰󠁹󠀲󠁿 Flag for San Pedro (PY-2) 🏴󠁰󠁹󠀵󠁿 Flag for Caaguazú (PY-5) 🏴󠁰󠁹󠀴󠁿 Flag for Guairá (PY-4) 🏴󠁲󠁯󠁢󠁣󠁿 Flag for Bacău (RO-BC) 🏴󠁰󠁹󠀷󠁿 Flag for Itapúa (PY-7) 🏴󠁲󠁯󠁣󠁳󠁿 Flag for Caraș-Severin (RO-CS) 🏴󠁰󠁹󠀶󠁿 Flag for Caazapá (PY-6) 🏴󠁱󠁡󠁫󠁨󠁿 Flag for Al Khor (QA-KH) 🏴󠁲󠁯󠁣󠁶󠁿 Flag for Covasna (RO-CV) 🏴󠁲󠁯󠁡󠁢󠁿 Flag for Alba (RO-AB) 🏴󠁱󠁡󠁤󠁡󠁿 Flag for Doha (QA-DA) 🏴󠁲󠁯󠁤󠁪󠁿 Flag for Dolj (RO-DJ) 🏴󠁰󠁹󠀳󠁿 Flag for Cordillera (PY-3) 🏴󠁱󠁡󠁭󠁳󠁿 Flag for Madinat ash Shamal (QA-MS) 🏴󠁲󠁯󠁢󠁨󠁿 Flag for Bihor (RO-BH) 🏴󠁲󠁯󠁨󠁲󠁿 Flag for Harghita (RO-HR) 🏴󠁲󠁯󠁢󠁲󠁿 Flag for Brăila (RO-BR) 🏴󠁲󠁯󠁡󠁧󠁿 Flag for Argeș (RO-AG) 🏴󠁱󠁡󠁺󠁡󠁿 Flag for Al Daayen (QA-ZA) 🏴󠁲󠁯󠁢󠁮󠁿 Flag for Bistriţa-Năsăud (RO-BN) 🏴󠁲󠁯󠁣󠁬󠁿 Flag for Călărași (RO-CL) 🏴󠁰󠁹󠁡󠁳󠁵󠁿 Flag for Asunción (PY-ASU) 🏴󠁰󠁹󠀱󠁿 Flag for Concepción (PY-1) 🏴󠁲󠁯󠁢󠁴󠁿 Flag for Botoşani (RO-BT) 🏴󠁲󠁯󠁧󠁬󠁿 Flag for Galați (RO-GL) 🏴󠁲󠁯󠁧󠁲󠁿 Flag for Giurgiu (RO-GR) 🏴󠁰󠁹󠀱󠀹󠁿 Flag for Boquerón (PY-19) 🏴󠁰󠁹󠀸󠁿 Flag for Misiones (PY-8) 🏴󠁲󠁯󠁢󠁿 Flag for Bucharest (RO-B) 🏴󠁰󠁹󠀹󠁿 Flag for Paraguarí (PY-9) 🏴󠁱󠁡󠁲󠁡󠁿 Flag for Al Rayyan (QA-RA) 🏴󠁲󠁯󠁣󠁴󠁿 Flag for Constanța (RO-CT) 🏴󠁲󠁯󠁨󠁤󠁿 Flag for Hunedoara (RO-HD) 🏴󠁲󠁯󠁤󠁢󠁿 Flag for Dâmbovița (RO-DB) 🏴󠁲󠁯󠁡󠁲󠁿 Flag 
for Arad (RO-AR) 🏴󠁲󠁯󠁣󠁪󠁿 Flag for Cluj (RO-CJ) 🏴󠁲󠁯󠁢󠁺󠁿 Flag for Buzău (RO-BZ) 🏴󠁱󠁡󠁷󠁡󠁿 Flag for Al Wakrah (QA-WA) 🏴󠁲󠁯󠁶󠁬󠁿 Flag for Vâlcea (RO-VL) 🏴󠁲󠁯󠁩󠁳󠁿 Flag for Iași (RO-IS) 🏴󠁲󠁯󠁭󠁨󠁿 Flag for Mehedinți (RO-MH) 🏴󠁲󠁳󠁫󠁭󠁿 Flag for Kosovo-Metohija (RS-KM) 🏴󠁲󠁯󠁩󠁬󠁿 Flag for Ialomița (RO-IL) 🏴󠁲󠁯󠁴󠁲󠁿 Flag for Teleorman (RO-TR) 🏴󠁲󠁳󠀱󠀲󠁿 Flag for Šumadija (RS-12) 🏴󠁲󠁳󠀲󠀰󠁿 Flag for Nišava (RS-20) 🏴󠁲󠁵󠁡󠁬󠁿 Flag for Altai (RU-AL) 🏴󠁲󠁯󠁶󠁮󠁿 Flag for Vrancea (RO-VN) 🏴󠁲󠁯󠁶󠁳󠁿 Flag for Vaslui (RO-VS) 🏴󠁲󠁯󠁩󠁦󠁿 Flag for Ilfov (RO-IF) 🏴󠁲󠁳󠀰󠀸󠁿 Flag for Mačva (RS-08) 🏴󠁲󠁳󠀰󠀹󠁿 Flag for Kolubara (RS-09) 🏴󠁲󠁯󠁰󠁨󠁿 Flag for Prahova (RO-PH) 🏴󠁲󠁳󠀱󠀱󠁿 Flag for Braničevo (RS-11) 🏴󠁲󠁳󠀰󠀰󠁿 Flag for Beograd (RS-00) 🏴󠁲󠁳󠀱󠀵󠁿 Flag for Zaječar (RS-15) 🏴󠁲󠁳󠀱󠀷󠁿 Flag for Moravica (RS-17) 🏴󠁲󠁳󠀱󠀳󠁿 Flag for Pomoravlje (RS-13) 🏴󠁲󠁯󠁯󠁴󠁿 Flag for Olt (RO-OT) 🏴󠁲󠁯󠁳󠁭󠁿 Flag for Satu Mare (RO-SM) 🏴󠁲󠁳󠀲󠀱󠁿 Flag for Toplica (RS-21) 🏴󠁲󠁯󠁳󠁪󠁿 Flag for Sălaj (RO-SJ) 🏴󠁲󠁯󠁭󠁳󠁿 Flag for Mureş (RO-MS) 🏴󠁲󠁳󠀲󠀲󠁿 Flag for Pirot (RS-22) 🏴󠁲󠁳󠀱󠀹󠁿 Flag for Rasina (RS-19) 🏴󠁲󠁳󠀲󠀴󠁿 Flag for Pčinja (RS-24) 🏴󠁲󠁯󠁭󠁭󠁿 Flag for Maramureş (RO-MM) 🏴󠁲󠁯󠁳󠁶󠁿 Flag for Suceava (RO-SV) 🏴󠁲󠁳󠀱󠀸󠁿 Flag for Raška (RS-18) 🏴󠁲󠁳󠀱󠀴󠁿 Flag for Bor (RS-14) 🏴󠁲󠁳󠀱󠀰󠁿 Flag for Podunavlje (RS-10) 🏴󠁲󠁯󠁮󠁴󠁿 Flag for Neamţ (RO-NT) 🏴󠁲󠁳󠀱󠀶󠁿 Flag for Zlatibor (RS-16) 🏴󠁲󠁳󠁶󠁯󠁿 Flag for Vojvodina (RS-VO) 🏴󠁲󠁳󠀲󠀳󠁿 Flag for Jablanica (RS-23) 🏴󠁲󠁯󠁴󠁬󠁿 Flag for Tulcea (RO-TL) 🏴󠁲󠁵󠁡󠁤󠁿 Flag for Adygea (RU-AD) 🏴󠁲󠁯󠁴󠁭󠁿 Flag for Timiș (RO-TM) 👩🏼‍👦🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁲󠁵󠁫󠁣󠁿 Flag for Karachay-Cherkess (RU-KC) 🏴󠁲󠁵󠁫󠁫󠁿 Flag for Khakassia (RU-KK) 🏴󠁲󠁵󠁢󠁵󠁿 Flag for Buryat (RU-BU) 🏴󠁲󠁵󠁫󠁬󠁿 Flag for Kalmykia (RU-KL) 🏴󠁲󠁵󠁢󠁥󠁬󠁿 Flag for Belgorod (RU-BEL) 🏴󠁲󠁵󠁫󠁨󠁭󠁿 Flag for Khanty-Mansi (RU-KHM) 🏴󠁲󠁵󠁬󠁥󠁮󠁿 Flag for Leningrad (RU-LEN) 🏴󠁲󠁵󠁫󠁧󠁮󠁿 Flag for Kurgan (RU-KGN) 🏴󠁲󠁵󠁩󠁶󠁡󠁿 Flag for Ivanovo (RU-IVA) 🏴󠁲󠁵󠁩󠁮󠁿 Flag for Ingushetia (RU-IN) 🏴󠁲󠁵󠁫󠁩󠁲󠁿 Flag for Kirov (RU-KIR) 🏴󠁲󠁵󠁫󠁤󠁡󠁿 Flag for Krasnodar Krai (RU-KDA) 🏴󠁲󠁵󠁫󠁲󠁿 Flag for Karelia (RU-KR) 🏴󠁲󠁵󠁭󠁡󠁧󠁿 Flag for Magadan (RU-MAG) 🏴󠁲󠁵󠁫󠁹󠁡󠁿 Flag 
for Krasnoyarsk Krai (RU-KYA) 🏴󠁲󠁵󠁫󠁥󠁭󠁿 Flag for Kemerovo (RU-KEM) 🏴󠁲󠁵󠁡󠁳󠁴󠁿 Flag for Astrakhan (RU-AST) 🏴󠁲󠁵󠁡󠁭󠁵󠁿 Flag for Amur (RU-AMU) 🏴󠁲󠁵󠁭󠁯󠁿 Flag for Mordovia (RU-MO) 🏴󠁲󠁵󠁫󠁯󠁿 Flag for Komi (RU-KO) 🏴󠁲󠁵󠁣󠁨󠁥󠁿 Flag for Chelyabinsk (RU-CHE) 🏴󠁲󠁵󠁫󠁨󠁡󠁿 Flag for Khabarovsk Krai (RU-KHA) 🏴󠁲󠁵󠁫󠁲󠁳󠁿 Flag for Kursk (RU-KRS) 🏴󠁲󠁵󠁭󠁥󠁿 Flag for Mari El (RU-ME) 🏴󠁲󠁵󠁣󠁨󠁵󠁿 Flag for Chukotka Okrug (RU-CHU) 🏴󠁲󠁵󠁫󠁧󠁤󠁿 Flag for Kaliningrad (RU-KGD) 🏴󠁲󠁵󠁩󠁲󠁫󠁿 Flag for Irkutsk (RU-IRK) 🏴󠁲󠁵󠁫󠁬󠁵󠁿 Flag for Kaluga (RU-KLU) 🏴󠁲󠁵󠁫󠁢󠁿 Flag for Kabardino-Balkar (RU-KB) 🏴󠁲󠁵󠁬󠁩󠁰󠁿 Flag for Lipetsk (RU-LIP) 🏴󠁲󠁵󠁢󠁡󠁿 Flag for Bashkortostan (RU-BA) 🏴󠁲󠁵󠁣󠁵󠁿 Flag for Chuvash (RU-CU) 🏴󠁲󠁵󠁫󠁡󠁭󠁿 Flag for Kamchatka Krai (RU-KAM) 🏴󠁲󠁵󠁫󠁯󠁳󠁿 Flag for Kostroma (RU-KOS) 🏴󠁲󠁵󠁳󠁡󠁫󠁿 Flag for Sakhalin (RU-SAK) 🏴󠁲󠁵󠁴󠁶󠁥󠁿 Flag for Tver (RU-TVE) 🏴󠁲󠁵󠁮󠁶󠁳󠁿 Flag for Novosibirsk (RU-NVS) 🏴󠁲󠁵󠁶󠁬󠁡󠁿 Flag for Vladimir (RU-VLA) 🏴󠁲󠁵󠁯󠁲󠁬󠁿 Flag for Oryol (RU-ORL) 🏴󠁲󠁵󠁳󠁴󠁡󠁿 Flag for Stavropol Krai (RU-STA) 🏴󠁲󠁵󠁮󠁩󠁺󠁿 Flag for Nizhny Novgorod (RU-NIZ) 🏴󠁲󠁵󠁳󠁡󠁲󠁿 Flag for Saratov (RU-SAR) 🏴󠁲󠁵󠁯󠁲󠁥󠁿 Flag for Orenburg (RU-ORE) 🏴󠁲󠁵󠁮󠁥󠁮󠁿 Flag for Nenets (RU-NEN) 🏴󠁲󠁵󠁶󠁧󠁧󠁿 Flag for Volgograd (RU-VGG) 🏴󠁲󠁵󠁴󠁯󠁭󠁿 Flag for Tomsk (RU-TOM) 🏴󠁲󠁵󠁳󠁶󠁥󠁿 Flag for Sverdlovsk (RU-SVE) 🏴󠁲󠁵󠁳󠁰󠁥󠁿 Flag for Saint Petersburg (RU-SPE) 🏴󠁲󠁵󠁹󠁡󠁮󠁿 Flag for Yamalo-Nenets Okrug (RU-YAN) 🏴󠁲󠁵󠁳󠁡󠁿 Flag for Sakha (RU-SA) 🏴󠁲󠁵󠁭󠁯󠁷󠁿 Flag for Moscow (RU-MOW) 🏴󠁲󠁵󠁰󠁮󠁺󠁿 Flag for Penza (RU-PNZ) 🏴󠁲󠁵󠁳󠁭󠁯󠁿 Flag for Smolensk (RU-SMO) 🏴󠁲󠁵󠁴󠁡󠁿 Flag for Tatarstan (RU-TA) 🏴󠁲󠁵󠁶󠁬󠁧󠁿 Flag for Vologda (RU-VLG) 🏴󠁲󠁵󠁴󠁵󠁬󠁿 Flag for Tula (RU-TUL) 🏴󠁲󠁵󠁹󠁡󠁲󠁿 Flag for Yaroslavl (RU-YAR) 🏴󠁲󠁵󠁴󠁹󠁵󠁿 Flag for Tyumen (RU-TYU) 🏴󠁲󠁵󠁰󠁳󠁫󠁿 Flag for Pskov (RU-PSK) 🏴󠁲󠁵󠁵󠁤󠁿 Flag for Udmurt (RU-UD) 🏴󠁲󠁵󠁳󠁡󠁭󠁿 Flag for Samara (RU-SAM) 🏴󠁲󠁵󠁵󠁬󠁹󠁿 Flag for Ulyanovsk (RU-ULY) 🏴󠁲󠁵󠁲󠁹󠁡󠁿 Flag for Ryazan (RU-RYA) 🏴󠁲󠁵󠁯󠁭󠁳󠁿 Flag for Omsk (RU-OMS) 🏴󠁲󠁵󠁰󠁥󠁲󠁿 Flag for Perm Krai (RU-PER) 🏴󠁲󠁵󠁶󠁯󠁲󠁿 Flag for Voronezh (RU-VOR) 🏴󠁲󠁵󠁮󠁧󠁲󠁿 Flag for Novgorod (RU-NGR) 🏴󠁲󠁵󠁴󠁡󠁭󠁿 Flag for Tambov (RU-TAM) 🏴󠁲󠁵󠁴󠁹󠁿 Flag for Tuva (RU-TY) 🏴󠁲󠁵󠁲󠁯󠁳󠁿 Flag for Rostov (RU-ROS) 🏴󠁲󠁵󠁭󠁵󠁲󠁿 Flag for Murmansk (RU-MUR) 🏴󠁲󠁷󠀰󠀱󠁿 Flag 
for Kigali (RW-01) 🏴󠁳󠁣󠀰󠀳󠁿 Flag for Anse Etoile (SC-03) 🏴󠁳󠁢󠁩󠁳󠁿 Flag for Isabel (SB-IS) 🏴󠁳󠁣󠀰󠀲󠁿 Flag for Anse Boileau (SC-02) 🏴󠁳󠁡󠀰󠀷󠁿 Flag for Tabuk (SA-07) 🏴󠁳󠁢󠁧󠁵󠁿 Flag for Guadalcanal (SB-GU) 🏴󠁲󠁷󠀰󠀳󠁿 Flag for Northern (RW-03) 🏴󠁲󠁷󠀰󠀵󠁿 Flag for Southern (RW-05) 🏴󠁳󠁢󠁣󠁥󠁿 Flag for Central (SB-CE) 🏴󠁳󠁡󠀰󠀶󠁿 Flag for Ha’il (SA-06) 🏴󠁳󠁣󠀰󠀹󠁿 Flag for Bel Air (SC-09) 🏴󠁳󠁢󠁭󠁬󠁿 Flag for Malaita (SB-ML) 🏴󠁳󠁡󠀱󠀰󠁿 Flag for Najran (SA-10) 🏴󠁳󠁡󠀱󠀲󠁿 Flag for Al Jawf (SA-12) 🏴󠁳󠁢󠁣󠁴󠁿 Flag for Honiara (SB-CT) 🏴󠁳󠁢󠁷󠁥󠁿 Flag for Western (SB-WE) 🏴󠁳󠁡󠀰󠀸󠁿 Flag for Northern Borders (SA-08) 🏴󠁳󠁡󠀰󠀱󠁿 Flag for Riyadh (SA-01) 🏴󠁳󠁢󠁲󠁢󠁿 Flag for Rennell and Bellona (SB-RB) 🏴󠁳󠁣󠀰󠀴󠁿 Flag for Au Cap (SC-04) 🏴󠁲󠁷󠀰󠀲󠁿 Flag for Eastern (RW-02) 🏴󠁳󠁣󠀰󠀵󠁿 Flag for Anse Royale (SC-05) 🏴󠁲󠁵󠁹󠁥󠁶󠁿 Flag for Jewish (RU-YEV) 🏴󠁳󠁣󠀱󠀰󠁿 Flag for Bel Ombre (SC-10) 🏴󠁳󠁡󠀰󠀵󠁿 Flag for Al-Qassim (SA-05) 🏴󠁳󠁢󠁴󠁥󠁿 Flag for Temotu (SB-TE) 🏴󠁳󠁣󠀰󠀷󠁿 Flag for Baie Sainte Anne (SC-07) 🏴󠁳󠁢󠁣󠁨󠁿 Flag for Choiseul (SB-CH) 🏴󠁲󠁷󠀰󠀴󠁿 Flag for Western (RW-04) 🏴󠁳󠁢󠁭󠁫󠁿 Flag for Makira-Ulawa (SB-MK) 🏴󠁳󠁡󠀰󠀲󠁿 Flag for Makkah (SA-02) 🏴󠁳󠁡󠀰󠀹󠁿 Flag for Jizan (SA-09) 🏴󠁳󠁣󠀰󠀱󠁿 Flag for Anse aux Pins (SC-01) 🏴󠁳󠁡󠀰󠀴󠁿 Flag for Eastern (SA-04) 🏴󠁳󠁡󠀱󠀴󠁿 Flag for Asir (SA-14) 🏴󠁲󠁵󠁺󠁡󠁢󠁿 Flag for Zabaykalsky Krai (RU-ZAB) 🏴󠁳󠁣󠀰󠀸󠁿 Flag for Beau Vallon (SC-08) 🏴󠁳󠁡󠀰󠀳󠁿 Flag for Al Madinah (SA-03) 🏴󠁳󠁣󠀰󠀶󠁿 Flag for Baie Lazare (SC-06) 🏴󠁳󠁣󠀱󠀹󠁿 Flag for Plaisance (SC-19) 🏴󠁳󠁥󠁤󠁿 Flag for Södermanland (SE-D) 🏴󠁳󠁣󠀱󠀶󠁿 Flag for La Rivière Anglaise (SC-16) 🏴󠁳󠁣󠀲󠀲󠁿 Flag for Saint Louis (SC-22) 🏴󠁳󠁣󠀱󠀸󠁿 Flag for Mont Fleuri (SC-18) 🏴󠁳󠁤󠁮󠁯󠁿 Flag for Northern (SD-NO) 🏴󠁳󠁣󠀱󠀳󠁿 Flag for Grand’Anse Mahé (SC-13) 🏴󠁳󠁣󠀲󠀳󠁿 Flag for Takamaka (SC-23) 🏴󠁳󠁤󠁤󠁷󠁿 Flag for West Darfur (SD-DW) 🏴󠁳󠁤󠁧󠁤󠁿 Flag for Al Qadarif (SD-GD) 🏴󠁳󠁤󠁤󠁳󠁿 Flag for South Darfur (SD-DS) 🏴󠁳󠁤󠁮󠁲󠁿 Flag for River Nile (SD-NR) 🏴󠁳󠁤󠁧󠁫󠁿 Flag for West Kurdufan (SD-GK) 🏴󠁳󠁤󠁫󠁡󠁿 Flag for Kassala (SD-KA) 🏴󠁳󠁤󠁫󠁨󠁿 Flag for Khartoum (SD-KH) 🏴󠁳󠁣󠀱󠀵󠁿 Flag for La Digue (SC-15) 🏴󠁳󠁣󠀲󠀴󠁿 Flag for Les Mamelles (SC-24) 🏴󠁳󠁣󠀲󠀱󠁿 Flag for Port Glaud (SC-21) 🏴󠁳󠁥󠁡󠁣󠁿 Flag for Västerbotten (SE-AC) 🏴󠁳󠁥󠁦󠁿 Flag 
for Jönköping (SE-F) 🏴󠁳󠁥󠁡󠁢󠁿 Flag for Stockholm (SE-AB) 🏴󠁳󠁣󠀱󠀲󠁿 Flag for Glacis (SC-12) 🏴󠁳󠁣󠀲󠀰󠁿 Flag for Pointe La Rue (SC-20) 🏴󠁳󠁤󠁮󠁷󠁿 Flag for White Nile (SD-NW) 🏴󠁳󠁤󠁧󠁺󠁿 Flag for Al Jazirah (SD-GZ) 🏴󠁳󠁥󠁥󠁿 Flag for Östergötland (SE-E) 🏴󠁳󠁥󠁢󠁤󠁿 Flag for Norrbotten (SE-BD) 🏴󠁳󠁥󠁣󠁿 Flag for Uppsala (SE-C) 🏴󠁳󠁣󠀱󠀷󠁿 Flag for Mont Buxton (SC-17) 🏴󠁳󠁣󠀱󠀴󠁿 Flag for Grand’Anse Praslin (SC-14) 🏴󠁳󠁤󠁫󠁳󠁿 Flag for South Kurdufan (SD-KS) 🏴󠁳󠁣󠀱󠀱󠁿 Flag for Cascade (SC-11) 🏴󠁳󠁤󠁫󠁮󠁿 Flag for North Kurdufan (SD-KN) 🏴󠁳󠁤󠁳󠁩󠁿 Flag for Sennar (SD-SI) 🏴󠁳󠁤󠁤󠁥󠁿 Flag for East Darfur (SD-DE) 🏴󠁳󠁤󠁮󠁢󠁿 Flag for Blue Nile (SD-NB) 🏴󠁳󠁤󠁤󠁮󠁿 Flag for North Darfur (SD-DN) 🏴󠁳󠁤󠁤󠁣󠁿 Flag for Central Darfur (SD-DC) 🏴󠁳󠁥󠁵󠁿 Flag for Västmanland (SE-U) 🏴󠁳󠁥󠁳󠁿 Flag for Värmland (SE-S) 🏴󠁳󠁩󠀰󠀱󠀷󠁿 Flag for Črnomelj (SI-017) 🏴󠁳󠁥󠁹󠁿 Flag for Västernorrland (SE-Y) 🏴󠁳󠁧󠀰󠀵󠁿 Flag for South West (SG-05) 🏴󠁳󠁩󠀰󠀱󠀶󠁿 Flag for Črna na Koroškem (SI-016) 🏴󠁳󠁥󠁯󠁿 Flag for Västra Götaland (SE-O) 🏴󠁳󠁥󠁸󠁿 Flag for Gävleborg (SE-X) 🏴󠁳󠁧󠀰󠀲󠁿 Flag for North East (SG-02) 🏴󠁳󠁩󠀰󠀰󠀷󠁿 Flag for Brda (SI-007) 🏴󠁳󠁥󠁨󠁿 Flag for Kalmar (SE-H) 🏴󠁳󠁩󠀰󠀱󠀸󠁿 Flag for Destrnik (SI-018) 🏴󠁳󠁩󠀰󠀰󠀲󠁿 Flag for Beltinci (SI-002) 🏴󠁳󠁩󠀰󠀰󠀴󠁿 Flag for Bohinj (SI-004) 🏴󠁳󠁩󠀰󠀰󠀹󠁿 Flag for Brežice (SI-009) 🏴󠁳󠁧󠀰󠀳󠁿 Flag for North West (SG-03) 🏴󠁳󠁨󠁡󠁣󠁿 Flag for Ascension Island (SH-AC) 👩🏽‍👦🏽‍👶🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁳󠁩󠀰󠀱󠀲󠁿 Flag for Cerklje na Gorenjskem (SI-012) 🏴󠁳󠁩󠀰󠀱󠀳󠁿 Flag for Cerknica (SI-013) 🏴󠁳󠁩󠀰󠀰󠀶󠁿 Flag for Bovec (SI-006) 🏴󠁳󠁩󠀰󠀱󠀵󠁿 Flag for Črenšovci (SI-015) 🏴󠁳󠁥󠁧󠁿 Flag for Kronoberg (SE-G) 🏴󠁳󠁩󠀰󠀰󠀱󠁿 Flag for Ajdovščina (SI-001) 🏴󠁳󠁩󠀰󠀱󠀰󠁿 Flag for Tišina (SI-010) 🏴󠁳󠁧󠀰󠀴󠁿 Flag for South East (SG-04) 🏴󠁳󠁩󠀰󠀰󠀸󠁿 Flag for Brezovica (SI-008) 🏴󠁳󠁨󠁨󠁬󠁿 Flag for Saint Helena (SH-HL) 🏴󠁳󠁥󠁺󠁿 Flag for Jämtland (SE-Z) 🏴󠁳󠁥󠁩󠁿 Flag for Gotland (SE-I) 🏴󠁳󠁥󠁷󠁿 Flag for Dalarna (SE-W) 🏴󠁳󠁥󠁫󠁿 Flag for Blekinge (SE-K) 🏴󠁳󠁩󠀰󠀰󠀵󠁿 Flag for Borovnica (SI-005) 🏴󠁳󠁨󠁴󠁡󠁿 Flag for Tristan da Cunha (SH-TA) 🏴󠁳󠁩󠀰󠀰󠀳󠁿 Flag for Bled (SI-003) 🏴󠁳󠁩󠀰󠀱󠀴󠁿 Flag for Cerkno (SI-014) 🏴󠁳󠁥󠁴󠁿 Flag for Örebro (SE-T) 
🏴󠁳󠁩󠀰󠀲󠀳󠁿 Flag for Domžale (SI-023) 🏴󠁳󠁩󠀰󠀴󠀰󠁿 Flag for Izola (SI-040) 🏴󠁳󠁩󠀰󠀵󠀶󠁿 Flag for Kuzma (SI-056) 🏴󠁳󠁩󠀰󠀲󠀵󠁿 Flag for Dravograd (SI-025) 🏴󠁳󠁩󠀰󠀲󠀶󠁿 Flag for Duplek (SI-026) 🏴󠁳󠁩󠀰󠀴󠀱󠁿 Flag for Jesenice (SI-041) 🏴󠁳󠁩󠀰󠀲󠀸󠁿 Flag for Gorišnica (SI-028) 🏴󠁳󠁩󠀰󠀲󠀹󠁿 Flag for Gornja Radgona (SI-029) 🏴󠁳󠁩󠀰󠀲󠀰󠁿 Flag for Dobrepolje (SI-020) 🏴󠁳󠁩󠀰󠀳󠀱󠁿 Flag for Gornji Petrovci (SI-031) 🏴󠁳󠁩󠀰󠀲󠀴󠁿 Flag for Dornava (SI-024) 🏴󠁳󠁩󠀰󠀳󠀴󠁿 Flag for Hrastnik (SI-034) 🏴󠁳󠁩󠀰󠀳󠀹󠁿 Flag for Ivančna Gorica (SI-039) 🏴󠁳󠁩󠀰󠀴󠀹󠁿 Flag for Komen (SI-049) 🏴󠁳󠁩󠀰󠀵󠀱󠁿 Flag for Kozje (SI-051) 🏴󠁳󠁩󠀰󠀱󠀹󠁿 Flag for Divača (SI-019) 🏴󠁳󠁩󠀰󠀳󠀶󠁿 Flag for Idrija (SI-036) 🏴󠁳󠁩󠀰󠀴󠀵󠁿 Flag for Kidričevo (SI-045) 🏴󠁳󠁩󠀰󠀴󠀶󠁿 Flag for Kobarid (SI-046) 🏴󠁳󠁩󠀰󠀴󠀷󠁿 Flag for Kobilje (SI-047) 🏴󠁳󠁩󠀰󠀵󠀰󠁿 Flag for Koper (SI-050) 🏴󠁳󠁩󠀰󠀳󠀷󠁿 Flag for Ig (SI-037) 🏴󠁳󠁩󠀰󠀵󠀵󠁿 Flag for Kungota (SI-055) 🏴󠁳󠁩󠀰󠀳󠀲󠁿 Flag for Grosuplje (SI-032) 🏴󠁳󠁩󠀰󠀲󠀱󠁿 Flag for Dobrova–Polhov Gradec (SI-021) 🏴󠁳󠁩󠀰󠀴󠀲󠁿 Flag for Juršinci (SI-042) 🏴󠁳󠁩󠀰󠀵󠀴󠁿 Flag for Krško (SI-054) 🏴󠁳󠁩󠀰󠀳󠀳󠁿 Flag for Šalovci (SI-033) 🏴󠁳󠁩󠀰󠀵󠀳󠁿 Flag for Kranjska Gora (SI-053) 🏴󠁳󠁩󠀰󠀴󠀸󠁿 Flag for Kočevje (SI-048) 🏴󠁳󠁩󠀰󠀳󠀸󠁿 Flag for Ilirska Bistrica (SI-038) 🏴󠁳󠁩󠀰󠀴󠀳󠁿 Flag for Kamnik (SI-043) 🏴󠁳󠁩󠀰󠀳󠀵󠁿 Flag for Hrpelje–Kozina (SI-035) 🏴󠁳󠁩󠀰󠀳󠀰󠁿 Flag for Gornji Grad (SI-030) 🏴󠁳󠁩󠀰󠀴󠀴󠁿 Flag for Kanal (SI-044) 🏴󠁳󠁩󠀰󠀲󠀲󠁿 Flag for Dol pri Ljubljani (SI-022) 🏴󠁳󠁩󠀰󠀸󠀹󠁿 Flag for Pesnica (SI-089) 🏴󠁳󠁩󠀰󠀹󠀰󠁿 Flag for Piran (SI-090) 🏴󠁳󠁩󠀰󠀷󠀴󠁿 Flag for Mežica (SI-074) 🏴󠁳󠁩󠀰󠀸󠀱󠁿 Flag for Muta (SI-081) 🏴󠁳󠁩󠀰󠀶󠀲󠁿 Flag for Ljubno (SI-062) 🏴󠁳󠁩󠀰󠀸󠀷󠁿 Flag for Ormož (SI-087) 🏴󠁳󠁩󠀰󠀹󠀴󠁿 Flag for Postojna (SI-094) 🏴󠁳󠁩󠀰󠀷󠀶󠁿 Flag for Mislinja (SI-076) 👩🏾‍👦🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁳󠁩󠀰󠀶󠀹󠁿 Flag for Majšperk (SI-069) 🏴󠁳󠁩󠀰󠀷󠀲󠁿 Flag for Mengeš (SI-072) 🏴󠁳󠁩󠀰󠀷󠀳󠁿 Flag for Metlika (SI-073) 🏴󠁳󠁩󠀰󠀷󠀷󠁿 Flag for Moravče (SI-077) 🏴󠁳󠁩󠀰󠀷󠀸󠁿 Flag for Moravske Toplice (SI-078) 🏴󠁳󠁩󠀰󠀶󠀱󠁿 Flag for Ljubljana (SI-061) 🏴󠁳󠁩󠀰󠀸󠀰󠁿 Flag for Murska Sobota (SI-080) 🏴󠁳󠁩󠀰󠀸󠀲󠁿 Flag for Naklo (SI-082) 🏴󠁳󠁩󠀰󠀸󠀴󠁿 Flag for Nova Gorica (SI-084) 🏴󠁳󠁩󠀰󠀸󠀸󠁿 Flag for 
Osilnica (SI-088) 🏴󠁳󠁩󠀰󠀹󠀱󠁿 Flag for Pivka (SI-091) 🏴󠁳󠁩󠀰󠀸󠀳󠁿 Flag for Nazarje (SI-083) 🏴󠁳󠁩󠀰󠀷󠀵󠁿 Flag for Miren–Kostanjevica (SI-075) 🏴󠁳󠁩󠀰󠀶󠀴󠁿 Flag for Logatec (SI-064) 🏴󠁳󠁩󠀰󠀶󠀰󠁿 Flag for Litija (SI-060) 🏴󠁳󠁩󠀰󠀷󠀰󠁿 Flag for Maribor (SI-070) 🏴󠁳󠁩󠀰󠀶󠀳󠁿 Flag for Ljutomer (SI-063) 🏴󠁳󠁩󠀰󠀶󠀶󠁿 Flag for Loški Potok (SI-066) 🏴󠁳󠁩󠀰󠀶󠀷󠁿 Flag for Luče (SI-067) 🏴󠁳󠁩󠀰󠀹󠀲󠁿 Flag for Podčetrtek (SI-092) 🏴󠁳󠁩󠀰󠀹󠀳󠁿 Flag for Podvelka (SI-093) 🏴󠁳󠁩󠀰󠀷󠀱󠁿 Flag for Medvode (SI-071) 🏴󠁳󠁩󠀰󠀶󠀵󠁿 Flag for Loška Dolina (SI-065) 🏴󠁳󠁩󠀰󠀵󠀷󠁿 Flag for Laško (SI-057) 🏴󠁳󠁩󠀰󠀵󠀹󠁿 Flag for Lendava (SI-059) 🏴󠁳󠁩󠀰󠀷󠀹󠁿 Flag for Mozirje (SI-079) 🏴󠁳󠁩󠀰󠀶󠀸󠁿 Flag for Lukovica (SI-068) 🏴󠁳󠁩󠀱󠀳󠀱󠁿 Flag for Tržič (SI-131) 🏴󠁳󠁩󠀱󠀱󠀸󠁿 Flag for Šentilj (SI-118) 🏴󠁳󠁩󠀰󠀹󠀸󠁿 Flag for Rače–Fram (SI-098) 🏴󠁳󠁩󠀰󠀹󠀷󠁿 Flag for Puconci (SI-097) 👩🏿‍👦🏿‍👶🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁳󠁩󠀱󠀰󠀵󠁿 Flag for Rogašovci (SI-105) 🏴󠁳󠁩󠀱󠀱󠀳󠁿 Flag for Slovenska Bistrica (SI-113) 🏴󠁳󠁩󠀱󠀰󠀷󠁿 Flag for Rogatec (SI-107) 🏴󠁳󠁩󠀰󠀹󠀶󠁿 Flag for Ptuj (SI-096) 🏴󠁳󠁩󠀱󠀱󠀹󠁿 Flag for Šentjernej (SI-119) 🏴󠁳󠁩󠀱󠀱󠀱󠁿 Flag for Sežana (SI-111) 🏴󠁳󠁩󠀱󠀲󠀳󠁿 Flag for Škofljica (SI-123) 🏴󠁳󠁩󠀱󠀱󠀲󠁿 Flag for Slovenj Gradec (SI-112) 🏴󠁳󠁩󠀱󠀱󠀵󠁿 Flag for Starše (SI-115) 🏴󠁳󠁩󠀱󠀱󠀶󠁿 Flag for Sveti Jurij (SI-116) 🏴󠁳󠁩󠀱󠀳󠀰󠁿 Flag for Trebnje (SI-130) 🏴󠁳󠁩󠀱󠀱󠀰󠁿 Flag for Sevnica (SI-110) 🏴󠁳󠁩󠀰󠀹󠀹󠁿 Flag for Radeče (SI-099) 🏴󠁳󠁩󠀱󠀲󠀱󠁿 Flag for Škocjan (SI-121) 🏴󠁳󠁩󠀱󠀲󠀴󠁿 Flag for Šmarje pri Jelšah (SI-124) 🏴󠁳󠁩󠀱󠀲󠀶󠁿 Flag for Šoštanj (SI-126) 🏴󠁳󠁩󠀱󠀲󠀷󠁿 Flag for Štore (SI-127) 🏴󠁳󠁩󠀱󠀰󠀶󠁿 Flag for Rogaška Slatina (SI-106) 🏴󠁳󠁩󠀰󠀹󠀵󠁿 Flag for Preddvor (SI-095) 🏴󠁳󠁩󠀱󠀳󠀲󠁿 Flag for Turnišče (SI-132) 🏴󠁳󠁩󠀱󠀰󠀲󠁿 Flag for Radovljica (SI-102) 🏴󠁳󠁩󠀱󠀰󠀸󠁿 Flag for Ruše (SI-108) 🏴󠁳󠁩󠀱󠀱󠀴󠁿 Flag for Slovenske Konjice (SI-114) 🏴󠁳󠁩󠀱󠀲󠀰󠁿 Flag for Šentjur (SI-120) 🏴󠁳󠁩󠀱󠀲󠀸󠁿 Flag for Tolmin (SI-128) 🏴󠁳󠁩󠀱󠀰󠀴󠁿 Flag for Ribnica (SI-104) 🏴󠁳󠁩󠀱󠀰󠀱󠁿 Flag for Radlje ob Dravi (SI-101) 🏴󠁳󠁩󠀱󠀲󠀹󠁿 Flag for Trbovlje (SI-129) 🏴󠁳󠁩󠀱󠀰󠀹󠁿 Flag for Semič (SI-109) 🏴󠁳󠁩󠀱󠀱󠀷󠁿 Flag for Šenčur (SI-117) 🏴󠁳󠁩󠀱󠀰󠀳󠁿 Flag for Ravne na Koroškem (SI-103) 🏴󠁳󠁩󠀱󠀶󠀹󠁿 Flag for Miklavž na Dravskem Polju 
(SI-169) 🏴󠁳󠁩󠀱󠀳󠀸󠁿 Flag for Vodice (SI-138) 🏴󠁳󠁩󠀱󠀳󠀳󠁿 Flag for Velenje (SI-133) 🏴󠁳󠁩󠀱󠀴󠀲󠁿 Flag for Zagorje ob Savi (SI-142) 🏴󠁳󠁩󠀱󠀴󠀱󠁿 Flag for Vuzenica (SI-141) 🏴󠁳󠁩󠀱󠀴󠀰󠁿 Flag for Vrhnika (SI-140) 👩🏻‍👧🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone 🏴󠁳󠁩󠀱󠀴󠀶󠁿 Flag for Železniki (SI-146) 🏴󠁳󠁩󠀱󠀴󠀷󠁿 Flag for Žiri (SI-147) 🏴󠁳󠁩󠀱󠀴󠀸󠁿 Flag for Benedikt (SI-148) 🏴󠁳󠁩󠀱󠀳󠀴󠁿 Flag for Velike Lašče (SI-134) 🏴󠁳󠁩󠀱󠀳󠀷󠁿 Flag for Vitanje (SI-137) 🏴󠁳󠁩󠀱󠀶󠀴󠁿 Flag for Komenda (SI-164) 🏴󠁳󠁩󠀱󠀵󠀵󠁿 Flag for Dobrna (SI-155) 🏴󠁳󠁩󠀱󠀵󠀶󠁿 Flag for Dobrovnik (SI-156) 🏴󠁳󠁩󠀱󠀵󠀷󠁿 Flag for Dolenjske Toplice (SI-157) 🏴󠁳󠁩󠀱󠀵󠀹󠁿 Flag for Hajdina (SI-159) 🏴󠁳󠁩󠀱󠀷󠀱󠁿 Flag for Oplotnica (SI-171) 🏴󠁳󠁩󠀱󠀳󠀵󠁿 Flag for Videm (SI-135) 🏴󠁳󠁩󠀱󠀶󠀳󠁿 Flag for Jezersko (SI-163) 🏴󠁳󠁩󠀱󠀵󠀲󠁿 Flag for Cankova (SI-152) 🏴󠁳󠁩󠀱󠀶󠀵󠁿 Flag for Kostel (SI-165) 🏴󠁳󠁩󠀱󠀶󠀶󠁿 Flag for Križevci (SI-166) 🏴󠁳󠁩󠀱󠀳󠀹󠁿 Flag for Vojnik (SI-139) 🏴󠁳󠁩󠀱󠀶󠀸󠁿 Flag for Markovci (SI-168) 🏴󠁳󠁩󠀱󠀷󠀰󠁿 Flag for Mirna Peč (SI-170) 🏴󠁳󠁩󠀱󠀳󠀶󠁿 Flag for Vipava (SI-136) 🏴󠁳󠁩󠀱󠀶󠀲󠁿 Flag for Horjul (SI-162) 🏴󠁳󠁩󠀱󠀵󠀳󠁿 Flag for Cerkvenjak (SI-153) 🏴󠁳󠁩󠀱󠀵󠀰󠁿 Flag for Bloke (SI-150) 🏴󠁳󠁩󠀱󠀴󠀳󠁿 Flag for Zavrč (SI-143) 🏴󠁳󠁩󠀱󠀴󠀹󠁿 Flag for Bistrica ob Sotli (SI-149) 🏴󠁳󠁩󠀱󠀴󠀴󠁿 Flag for Zreče (SI-144) 🏴󠁳󠁩󠀱󠀶󠀱󠁿 Flag for Hodoš (SI-161) 🏴󠁳󠁩󠀱󠀶󠀰󠁿 Flag for Hoče–Slivnica (SI-160) 🏴󠁳󠁩󠀱󠀵󠀸󠁿 Flag for Grad (SI-158) 🏴󠁳󠁩󠀱󠀷󠀲󠁿 Flag for Podlehnik (SI-172) 🏴󠁳󠁩󠀱󠀹󠀶󠁿 Flag for Cirkulane (SI-196) 🏴󠁳󠁩󠀱󠀷󠀴󠁿 Flag for Prebold (SI-174) 🏴󠁳󠁩󠀱󠀷󠀶󠁿 Flag for Razkrižje (SI-176) 🏴󠁳󠁩󠀱󠀸󠀸󠁿 Flag for Veržej (SI-188) 🏴󠁳󠁩󠀱󠀹󠀰󠁿 Flag for Žalec (SI-190) 🏴󠁳󠁩󠀱󠀸󠀰󠁿 Flag for Solčava (SI-180) 🏴󠁳󠁩󠀱󠀸󠀱󠁿 Flag for Sveta Ana (SI-181) 🏴󠁳󠁩󠀱󠀸󠀳󠁿 Flag for Šempeter–Vrtojba (SI-183) 🏴󠁳󠁩󠀱󠀸󠀵󠁿 Flag for Trnovska Vas (SI-185) 🏴󠁳󠁩󠀱󠀷󠀹󠁿 Flag for Sodražica (SI-179) 🏴󠁳󠁩󠀱󠀹󠀸󠁿 Flag for Makole (SI-198) 🏴󠁳󠁩󠀲󠀰󠀳󠁿 Flag for Straža (SI-203) 🏴󠁳󠁩󠀱󠀷󠀸󠁿 Flag for Selnica ob Dravi (SI-178) 🏴󠁳󠁩󠀱󠀹󠀳󠁿 Flag for Žužemberk (SI-193) 🏴󠁳󠁩󠀱󠀹󠀷󠁿 Flag for Kostanjevica na Krki (SI-197) 🏴󠁳󠁩󠀱󠀷󠀵󠁿 Flag for Prevalje (SI-175) 🏴󠁳󠁩󠀱󠀹󠀴󠁿 Flag for Šmartno pri Litiji (SI-194) 🏴󠁳󠁩󠀱󠀹󠀱󠁿 Flag for Žetale (SI-191) 🏴󠁳󠁩󠀱󠀸󠀹󠁿 Flag for Vransko (SI-189) 
🏴󠁳󠁩󠀲󠀰󠀱󠁿 Flag for Renče–Vogrsko (SI-201) 🏴󠁳󠁩󠀲󠀰󠀲󠁿 Flag for Središče ob Dravi (SI-202) 🏴󠁳󠁩󠀱󠀸󠀶󠁿 Flag for Trzin (SI-186) 🏴󠁳󠁩󠀲󠀰󠀴󠁿 Flag for Sveta Trojica v Slovenskih Goricah (SI-204) 🏴󠁳󠁩󠀲󠀰󠀵󠁿 Flag for Sveti Tomaž (SI-205) 🏴󠁳󠁩󠀱󠀷󠀷󠁿 Flag for Ribnica na Pohorju (SI-177) 🏴󠁳󠁩󠀲󠀰󠀷󠁿 Flag for Gorje (SI-207) 🏴󠁳󠁩󠀱󠀸󠀴󠁿 Flag for Tabor (SI-184) 🏴󠁳󠁩󠀱󠀹󠀹󠁿 Flag for Mokronog–Trebelno (SI-199) 🏴󠁳󠁩󠀱󠀷󠀳󠁿 Flag for Polzela (SI-173) 🏴󠁳󠁩󠀲󠀰󠀰󠁿 Flag for Poljčane (SI-200) 🏴󠁳󠁩󠀱󠀹󠀵󠁿 Flag for Apače (SI-195) 🏴󠁳󠁩󠀱󠀸󠀷󠁿 Flag for Velika Polana (SI-187) 🏴󠁳󠁫󠁴󠁡󠁿 Flag for Trnava (SK-TA) 🏴󠁳󠁩󠀲󠀰󠀹󠁿 Flag for Rečica ob Savinji (SI-209) 🏴󠁳󠁭󠀰󠀹󠁿 Flag for Serravalle (SM-09) 🏴󠁳󠁭󠀰󠀲󠁿 Flag for Chiesanuova (SM-02) 🏴󠁳󠁮󠁫󠁡󠁿 Flag for Kaffrine (SN-KA) 🏴󠁳󠁫󠁮󠁩󠁿 Flag for Nitra (SK-NI) 🏴󠁳󠁩󠀲󠀱󠀱󠁿 Flag for Šentrupert (SI-211) 🏴󠁳󠁭󠀰󠀶󠁿 Flag for Borgo Maggiore (SM-06) 🏴󠁳󠁫󠁫󠁩󠁿 Flag for Košice (SK-KI) 🏴󠁳󠁫󠁢󠁣󠁿 Flag for Banská Bystrica (SK-BC) 🏴󠁳󠁭󠀰󠀸󠁿 Flag for Montegiardino (SM-08) 🏴󠁳󠁮󠁤󠁫󠁿 Flag for Dakar (SN-DK) 🏴󠁳󠁫󠁰󠁶󠁿 Flag for Prešov (SK-PV) 🏴󠁳󠁩󠀲󠀱󠀲󠁿 Flag for Mirna (SI-212) 🏴󠁳󠁭󠀰󠀵󠁿 Flag for Fiorentino (SM-05) 🏴󠁳󠁮󠁴󠁨󠁿 Flag for Thiès (SN-TH) 🏴󠁳󠁩󠀲󠀱󠀳󠁿 Flag for Ankaran (SI-213) 🏴󠁳󠁮󠁴󠁣󠁿 Flag for Tambacounda (SN-TC) 🏴󠁳󠁮󠁦󠁫󠁿 Flag for Fatick (SN-FK) 🏴󠁳󠁫󠁴󠁣󠁿 Flag for Trenčín (SK-TC) 🏴󠁳󠁮󠁫󠁬󠁿 Flag for Kaolack (SN-KL) 🏴󠁳󠁭󠀰󠀴󠁿 Flag for Faetano (SM-04) 🏴󠁳󠁫󠁺󠁩󠁿 Flag for Žilina (SK-ZI) 🏴󠁳󠁬󠁳󠁿 Flag for Southern (SL-S) 🏴󠁳󠁮󠁳󠁥󠁿 Flag for Sédhiou (SN-SE) 🏴󠁳󠁫󠁢󠁬󠁿 Flag for Bratislava (SK-BL) 🏴󠁳󠁮󠁤󠁢󠁿 Flag for Diourbel (SN-DB) 🏴󠁳󠁮󠁫󠁥󠁿 Flag for Kédougou (SN-KE) 🏴󠁳󠁬󠁮󠁿 Flag for Northern (SL-N) 🏴󠁳󠁬󠁷󠁿 Flag for Western Area (SL-W) 🏴󠁳󠁮󠁭󠁴󠁿 Flag for Matam (SN-MT) 🏴󠁳󠁬󠁥󠁿 Flag for Eastern (SL-E) 🏴󠁳󠁭󠀰󠀱󠁿 Flag for Acquaviva (SM-01) 🏴󠁳󠁮󠁫󠁤󠁿 Flag for Kolda (SN-KD) 🏴󠁳󠁮󠁳󠁬󠁿 Flag for Saint-Louis (SN-SL) 🏴󠁳󠁭󠀰󠀷󠁿 Flag for San Marino (SM-07) 🏴󠁳󠁮󠁬󠁧󠁿 Flag for Louga (SN-LG) 🏴󠁳󠁭󠀰󠀳󠁿 Flag for Domagnano (SM-03) 🏴󠁳󠁳󠁥󠁥󠁿 Flag for Eastern Equatoria (SS-EE) 🏴󠁳󠁲󠁳󠁡󠁿 Flag for Saramacca (SR-SA) 👩🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁳󠁲󠁭󠁡󠁿 Flag for Marowijne (SR-MA) 🏴󠁳󠁯󠁪󠁤󠁿 Flag for Middle Juba (SO-JD) 
🏴󠁳󠁯󠁭󠁵󠁿 Flag for Mudug (SO-MU) 🏴󠁳󠁯󠁳󠁨󠁿 Flag for Lower Shebelle (SO-SH) 🏴󠁳󠁯󠁨󠁩󠁿 Flag for Hiran (SO-HI) 🏴󠁳󠁳󠁥󠁣󠁿 Flag for Central Equatoria (SS-EC) 🏴󠁳󠁮󠁺󠁧󠁿 Flag for Ziguinchor (SN-ZG) 🏴󠁳󠁲󠁣󠁲󠁿 Flag for Coronie (SR-CR) 🏴󠁳󠁯󠁳󠁤󠁿 Flag for Middle Shebelle (SO-SD) 🏴󠁳󠁳󠁮󠁵󠁿 Flag for Upper Nile (SS-NU) 🏴󠁳󠁲󠁷󠁡󠁿 Flag for Wanica (SR-WA) 🏴󠁳󠁯󠁡󠁷󠁿 Flag for Awdal (SO-AW) 🏴󠁳󠁯󠁳󠁡󠁿 Flag for Sanaag (SO-SA) 🏴󠁳󠁯󠁪󠁨󠁿 Flag for Lower Juba (SO-JH) 🏴󠁳󠁳󠁬󠁫󠁿 Flag for Lakes (SS-LK) 🏴󠁳󠁳󠁷󠁲󠁿 Flag for Warrap (SS-WR) 🏴󠁳󠁴󠁰󠁿 Flag for Príncipe (ST-P) 🏴󠁳󠁲󠁳󠁩󠁿 Flag for Sipaliwini (SR-SI) 🏴󠁳󠁳󠁢󠁷󠁿 Flag for Western Bahr el Ghazal (SS-BW) 🏴󠁳󠁳󠁥󠁷󠁿 Flag for Western Equatoria (SS-EW) 🏴󠁳󠁯󠁢󠁲󠁿 Flag for Bari (SO-BR) 🏴󠁳󠁳󠁪󠁧󠁿 Flag for Jonglei (SS-JG) 🏴󠁳󠁲󠁰󠁭󠁿 Flag for Paramaribo (SR-PM) 🏴󠁳󠁲󠁣󠁭󠁿 Flag for Commewijne (SR-CM) 🏴󠁳󠁯󠁧󠁡󠁿 Flag for Galguduud (SO-GA) 🏴󠁳󠁲󠁮󠁩󠁿 Flag for Nickerie (SR-NI) 🏴󠁳󠁲󠁰󠁲󠁿 Flag for Para (SR-PR) 🏴󠁳󠁯󠁷󠁯󠁿 Flag for Woqooyi Galbeed (SO-WO) 🏴󠁳󠁯󠁧󠁥󠁿 Flag for Gedo (SO-GE) 🏴󠁳󠁯󠁢󠁹󠁿 Flag for Bay, Somalia (SO-BY) 🏴󠁳󠁲󠁢󠁲󠁿 Flag for Brokopondo (SR-BR) 🏴󠁳󠁯󠁮󠁵󠁿 Flag for Nugal (SO-NU) 🏴󠁳󠁯󠁴󠁯󠁿 Flag for Togdheer (SO-TO) 🏴󠁳󠁯󠁢󠁫󠁿 Flag for Bakool (SO-BK) 🏴󠁳󠁯󠁳󠁯󠁿 Flag for Sool (SO-SO) 🏴󠁳󠁺󠁨󠁨󠁿 Flag for Hhohho (SZ-HH) 🏴󠁴󠁤󠁥󠁯󠁿 Flag for Ennedi-Ouest (TD-EO) 🏴󠁴󠁤󠁧󠁲󠁿 Flag for Guéra (TD-GR) 🏴󠁳󠁺󠁳󠁨󠁿 Flag for Shiselweni (SZ-SH) 🏴󠁳󠁹󠁤󠁲󠁿 Flag for Daraa (SY-DR) 🏴󠁳󠁹󠁲󠁡󠁿 Flag for Ar-Raqqah (SY-RA) 🏴󠁳󠁶󠁳󠁯󠁿 Flag for Sonsonate (SV-SO) 🏴󠁳󠁶󠁵󠁮󠁿 Flag for La Unión (SV-UN) 🏴󠁳󠁶󠁳󠁭󠁿 Flag for San Miguel (SV-SM) 🏴󠁳󠁶󠁭󠁯󠁿 Flag for Morazán (SV-MO) 🏴󠁳󠁶󠁳󠁳󠁿 Flag for San Salvador (SV-SS) 🏴󠁳󠁹󠁤󠁹󠁿 Flag for Deir ez-Zor (SY-DY) 🏴󠁳󠁶󠁣󠁡󠁿 Flag for Cabañas (SV-CA) 🏴󠁳󠁺󠁬󠁵󠁿 Flag for Lubombo (SZ-LU) 🏴󠁳󠁶󠁣󠁨󠁿 Flag for Chalatenango (SV-CH) 🏴󠁳󠁹󠁲󠁤󠁿 Flag for Rif Dimashq (SY-RD) 🏴󠁳󠁹󠁴󠁡󠁿 Flag for Tartus (SY-TA) 🏴󠁴󠁤󠁢󠁯󠁿 Flag for Borkou (TD-BO) 🏴󠁳󠁺󠁭󠁡󠁿 Flag for Manzini (SZ-MA) 🏴󠁴󠁤󠁢󠁡󠁿 Flag for Batha (TD-BA) 🏴󠁳󠁹󠁨󠁩󠁿 Flag for Homs (SY-HI) 🏴󠁴󠁤󠁥󠁥󠁿 Flag for Ennedi-Est (TD-EE) 🏴󠁴󠁤󠁢󠁧󠁿 Flag for Bahr el Gazel (TD-BG) 🏴󠁴󠁤󠁫󠁡󠁿 Flag for Kanem (TD-KA) 🏴󠁳󠁹󠁨󠁭󠁿 Flag for Hama (SY-HM) 🏴󠁳󠁹󠁬󠁡󠁿 Flag for Latakia (SY-LA) 🏴󠁳󠁹󠁩󠁤󠁿 Flag for Idlib 
(SY-ID) 🏴󠁳󠁶󠁬󠁩󠁿 Flag for La Libertad (SV-LI) 🏴󠁳󠁹󠁨󠁬󠁿 Flag for Aleppo (SY-HL) 🏴󠁳󠁶󠁡󠁨󠁿 Flag for Ahuachapán (SV-AH) 🏴󠁴󠁤󠁣󠁢󠁿 Flag for Chari-Baguirmi (TD-CB) 🏴󠁳󠁶󠁰󠁡󠁿 Flag for La Paz (SV-PA) 🏴󠁳󠁹󠁳󠁵󠁿 Flag for As-Suwayda (SY-SU) 🏴󠁳󠁹󠁤󠁩󠁿 Flag for Damascus (SY-DI) 🏴󠁳󠁹󠁱󠁵󠁿 Flag for Quneitra (SY-QU) 🏴󠁳󠁹󠁨󠁡󠁿 Flag for Al-Hasakah (SY-HA) 🏴󠁳󠁶󠁳󠁡󠁿 Flag for Santa Ana (SV-SA) 🏴󠁳󠁶󠁣󠁵󠁿 Flag for Cuscatlán (SV-CU) 🏴󠁴󠁤󠁬󠁯󠁿 Flag for Logone Occidental (TD-LO) 🏴󠁴󠁨󠀲󠀲󠁿 Flag for Chanthaburi (TH-22) 🏴󠁴󠁤󠁭󠁥󠁿 Flag for Mayo-Kebbi Est (TD-ME) 🏴󠁴󠁤󠁭󠁣󠁿 Flag for Moyen-Chari (TD-MC) 🏴󠁴󠁤󠁬󠁲󠁿 Flag for Logone Oriental (TD-LR) 🏴󠁴󠁧󠁳󠁿 Flag for Savanes (TG-S) 🏴󠁴󠁨󠀱󠀴󠁿 Flag for Phra Nakhon Si Ayutthaya (TH-14) 🏴󠁴󠁧󠁣󠁿 Flag for Centrale (TG-C) 🏴󠁴󠁨󠀲󠀷󠁿 Flag for Sa Kaeo (TH-27) 🏴󠁴󠁨󠀱󠀲󠁿 Flag for Nonthaburi (TH-12) 🏴󠁴󠁨󠀳󠀱󠁿 Flag for Buri Ram (TH-31) 🏴󠁴󠁨󠀲󠀰󠁿 Flag for Chon Buri (TH-20) 🏴󠁴󠁤󠁳󠁩󠁿 Flag for Sila (TD-SI) 🏴󠁴󠁤󠁬󠁣󠁿 Flag for Lac (TD-LC) 🏴󠁴󠁨󠀲󠀱󠁿 Flag for Rayong (TH-21) 🏴󠁴󠁨󠀲󠀵󠁿 Flag for Prachin Buri (TH-25) 🏴󠁴󠁨󠀳󠀰󠁿 Flag for Nakhon Ratchasima (TH-30) 🏴󠁴󠁧󠁫󠁿 Flag for Kara (TG-K) 🏴󠁴󠁨󠀱󠀵󠁿 Flag for Ang Thong (TH-15) 🏴󠁴󠁨󠀱󠀰󠁿 Flag for Bangkok (TH-10) 🏴󠁴󠁤󠁭󠁡󠁿 Flag for Mandoul (TD-MA) 🏴󠁴󠁨󠀱󠀳󠁿 Flag for Pathum Thani (TH-13) 🏴󠁴󠁨󠀲󠀴󠁿 Flag for Chachoengsao (TH-24) 🏴󠁴󠁨󠀱󠀷󠁿 Flag for Sing Buri (TH-17) 🏴󠁴󠁤󠁭󠁯󠁿 Flag for Mayo-Kebbi Ouest (TD-MO) 🏴󠁴󠁤󠁯󠁤󠁿 Flag for Ouaddaï (TD-OD) 🏴󠁴󠁨󠀳󠀲󠁿 Flag for Surin (TH-32) 🏴󠁴󠁨󠀲󠀶󠁿 Flag for Nakhon Nayok (TH-26) 🏴󠁴󠁤󠁳󠁡󠁿 Flag for Salamat (TD-SA) 🏴󠁴󠁤󠁴󠁡󠁿 Flag for Tandjilé (TD-TA) 🏴󠁴󠁤󠁷󠁦󠁿 Flag for Wadi Fira (TD-WF) 🏴󠁴󠁨󠀱󠀹󠁿 Flag for Saraburi (TH-19) 🏴󠁴󠁨󠀱󠀱󠁿 Flag for Samut Prakan (TH-11) 🏴󠁴󠁤󠁴󠁩󠁿 Flag for Tibesti (TD-TI) 🏴󠁴󠁧󠁰󠁿 Flag for Plateaux (TG-P) 🏴󠁴󠁤󠁮󠁤󠁿 Flag for N’Djamena (TD-ND) 🏴󠁴󠁨󠀱󠀸󠁿 Flag for Chai Nat (TH-18) 🏴󠁴󠁨󠀶󠀲󠁿 Flag for Kamphaeng Phet (TH-62) 🏴󠁴󠁨󠀷󠀲󠁿 Flag for Suphanburi (TH-72) 🏴󠁴󠁨󠀷󠀴󠁿 Flag for Samut Sakhon (TH-74) 🏴󠁴󠁨󠀶󠀷󠁿 Flag for Phetchabun (TH-67) 🏴󠁴󠁨󠀷󠀱󠁿 Flag for Kanchanaburi (TH-71) 🏴󠁴󠁨󠀵󠀴󠁿 Flag for Phrae (TH-54) 🏴󠁴󠁨󠀶󠀳󠁿 Flag for Tak (TH-63) 🏴󠁴󠁨󠀴󠀸󠁿 Flag for Nakhon Phanom (TH-48) 🏴󠁴󠁨󠀵󠀲󠁿 Flag for Lampang (TH-52) 🏴󠁴󠁨󠀵󠀸󠁿 Flag for Mae Hong Son 
(TH-58) 🏴󠁴󠁨󠀴󠀷󠁿 Flag for Sakon Nakhon (TH-47) 🏴󠁴󠁨󠀵󠀶󠁿 Flag for Phayao (TH-56) 🏴󠁴󠁨󠀴󠀱󠁿 Flag for Udon Thani (TH-41) 🏴󠁴󠁨󠀴󠀹󠁿 Flag for Mukdahan (TH-49) 🏴󠁴󠁨󠀷󠀳󠁿 Flag for Nakhon Pathom (TH-73) 🏴󠁴󠁨󠀵󠀰󠁿 Flag for Chiang Mai (TH-50) 🏴󠁴󠁨󠀴󠀰󠁿 Flag for Khon Kaen (TH-40) 🏴󠁴󠁨󠀳󠀷󠁿 Flag for Amnat Charoen (TH-37) 🏴󠁴󠁨󠀷󠀰󠁿 Flag for Ratchaburi (TH-70) 🏴󠁴󠁨󠀳󠀵󠁿 Flag for Yasothon (TH-35) 🏴󠁴󠁨󠀵󠀱󠁿 Flag for Lamphun (TH-51) 🏴󠁴󠁨󠀴󠀲󠁿 Flag for Loei (TH-42) 🏴󠁴󠁨󠀶󠀰󠁿 Flag for Nakhon Sawan (TH-60) 🏴󠁴󠁨󠀳󠀴󠁿 Flag for Ubon Ratchathani (TH-34) 🏴󠁴󠁨󠀴󠀴󠁿 Flag for Maha Sarakham (TH-44) 🏴󠁴󠁨󠀴󠀵󠁿 Flag for Roi Et (TH-45) 🏴󠁴󠁨󠀴󠀶󠁿 Flag for Kalasin (TH-46) 🏴󠁴󠁨󠀶󠀶󠁿 Flag for Phichit (TH-66) 🏴󠁴󠁨󠀵󠀵󠁿 Flag for Nan (TH-55) 🏴󠁴󠁨󠀶󠀱󠁿 Flag for Uthai Thani (TH-61) 🏴󠁴󠁨󠀳󠀸󠁿 Flag for Bueng Kan (TH-38) 🏴󠁴󠁨󠀳󠀳󠁿 Flag for Si Sa Ket (TH-33) 🏴󠁴󠁨󠀳󠀹󠁿 Flag for Nong Bua Lam Phu (TH-39) 🏴󠁴󠁨󠀵󠀳󠁿 Flag for Uttaradit (TH-53) 🏴󠁴󠁨󠀵󠀷󠁿 Flag for Chiang Rai (TH-57) 🏴󠁴󠁨󠀶󠀴󠁿 Flag for Sukhothai (TH-64) 🏴󠁴󠁨󠀴󠀳󠁿 Flag for Nong Khai (TH-43) 🏴󠁴󠁨󠀶󠀵󠁿 Flag for Phitsanulok (TH-65) 🏴󠁴󠁬󠁥󠁲󠁿 Flag for Ermera (TL-ER) 🏴󠁴󠁬󠁯󠁥󠁿 Flag for Oecusse (TL-OE) 🏴󠁴󠁬󠁬󠁩󠁿 Flag for Liquiçá (TL-LI) 🏴󠁴󠁬󠁡󠁬󠁿 Flag for Aileu (TL-AL) 🏴󠁴󠁭󠁡󠁿 Flag for Ahal (TM-A) 🏴󠁴󠁨󠀸󠀴󠁿 Flag for Surat Thani (TH-84) 🏴󠁴󠁨󠀷󠀶󠁿 Flag for Phetchaburi (TH-76) 🏴󠁴󠁬󠁢󠁯󠁿 Flag for Bobonaro (TL-BO) 🏴󠁴󠁬󠁭󠁴󠁿 Flag for Manatuto (TL-MT) 🏴󠁴󠁪󠁫󠁴󠁿 Flag for Khatlon (TJ-KT) 🏴󠁴󠁬󠁡󠁮󠁿 Flag for Ainaro (TL-AN) 🏴󠁴󠁨󠀸󠀲󠁿 Flag for Phang Nga (TH-82) 🏴󠁴󠁬󠁣󠁯󠁿 Flag for Cova Lima (TL-CO) 🏴󠁴󠁮󠀱󠀱󠁿 Flag for Tunis (TN-11) 🏴󠁴󠁨󠀸󠀵󠁿 Flag for Ranong (TH-85) 🏴󠁴󠁨󠀸󠀰󠁿 Flag for Nakhon Si Thammarat (TH-80) 🏴󠁴󠁨󠀷󠀷󠁿 Flag for Prachuap Khiri Khan (TH-77) 🏴󠁴󠁪󠁤󠁵󠁿 Flag for Dushanbe (TJ-DU) 🏴󠁴󠁨󠀹󠀵󠁿 Flag for Yala (TH-95) 🏴󠁴󠁨󠀹󠀰󠁿 Flag for Songkhla (TH-90) 🏴󠁴󠁭󠁬󠁿 Flag for Lebap (TM-L) 🏴󠁴󠁨󠀹󠀶󠁿 Flag for Narathiwat (TH-96) 🏴󠁴󠁭󠁭󠁿 Flag for Mary (TM-M) 🏴󠁴󠁬󠁭󠁦󠁿 Flag for Manufahi (TL-MF) 👨🏼‍👨🏼‍👦🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁴󠁭󠁢󠁿 Flag for Balkan (TM-B) 🏴󠁴󠁬󠁢󠁡󠁿 Flag for Baucau (TL-BA) 🏴󠁴󠁪󠁲󠁡󠁿 Flag for Nohiyahoi Tobei Jumhurí 
(TJ-RA) 🏴󠁴󠁨󠀹󠀲󠁿 Flag for Trang (TH-92) 🏴󠁴󠁪󠁳󠁵󠁿 Flag for Sughd (TJ-SU) 🏴󠁴󠁬󠁶󠁩󠁿 Flag for Viqueque (TL-VI) 🏴󠁴󠁨󠀹󠀴󠁿 Flag for Pattani (TH-94) 🏴󠁴󠁨󠀸󠀱󠁿 Flag for Krabi (TH-81) 🏴󠁴󠁬󠁤󠁩󠁿 Flag for Dili (TL-DI) 🏴󠁴󠁨󠀸󠀳󠁿 Flag for Phuket (TH-83) 🏴󠁴󠁨󠀹󠀱󠁿 Flag for Satun (TH-91) 🏴󠁴󠁨󠁳󠁿 Flag for Pattaya (TH-S) 🏴󠁴󠁭󠁤󠁿 Flag for Daşoguz (TM-D) 🏴󠁴󠁮󠀴󠀱󠁿 Flag for Kairouan (TN-41) 🏴󠁴󠁮󠀵󠀲󠁿 Flag for Monastir (TN-52) 🏴󠁴󠁲󠀰󠀹󠁿 Flag for Aydın (TR-09) 🏴󠁴󠁮󠀳󠀱󠁿 Flag for Béja (TN-31) 🏴󠁴󠁲󠀰󠀷󠁿 Flag for Antalya (TR-07) 🏴󠁴󠁮󠀲󠀱󠁿 Flag for Nabeul (TN-21) 🏴󠁴󠁮󠀵󠀳󠁿 Flag for Mahdia (TN-53) 🏴󠁴󠁯󠀰󠀲󠁿 Flag for Haʻapai (TO-02) 🏴󠁴󠁲󠀰󠀵󠁿 Flag for Amasya (TR-05) 🏴󠁴󠁲󠀱󠀳󠁿 Flag for Bitlis (TR-13) 🏴󠁴󠁮󠀱󠀲󠁿 Flag for Ariana (TN-12) 🏴󠁴󠁮󠀷󠀳󠁿 Flag for Kebili (TN-73) 🏴󠁴󠁲󠀰󠀱󠁿 Flag for Adana (TR-01) 🏴󠁴󠁯󠀰󠀱󠁿 Flag for ʻEua (TO-01) 🏴󠁴󠁲󠀱󠀲󠁿 Flag for Bingöl (TR-12) 🏴󠁴󠁮󠀸󠀳󠁿 Flag for Tataouine (TN-83) 🏴󠁴󠁲󠀰󠀸󠁿 Flag for Artvin (TR-08) 🏴󠁴󠁮󠀵󠀱󠁿 Flag for Sousse (TN-51) 🏴󠁴󠁮󠀸󠀱󠁿 Flag for Gabès (TN-81) 🏴󠁴󠁲󠀰󠀴󠁿 Flag for Ağrı (TR-04) 🏴󠁴󠁲󠀱󠀱󠁿 Flag for Bilecik (TR-11) 🏴󠁴󠁮󠀳󠀲󠁿 Flag for Jendouba (TN-32) 🏴󠁴󠁯󠀰󠀴󠁿 Flag for Tongatapu (TO-04) 🏴󠁴󠁲󠀰󠀲󠁿 Flag for Adıyaman (TR-02) 🏴󠁴󠁮󠀳󠀳󠁿 Flag for Kef (TN-33) 🏴󠁴󠁮󠀲󠀲󠁿 Flag for Zaghouan (TN-22) 🏴󠁴󠁲󠀱󠀰󠁿 Flag for Balıkesir (TR-10) 🏴󠁴󠁮󠀱󠀳󠁿 Flag for Ben Arous (TN-13) 🏴󠁴󠁯󠀰󠀳󠁿 Flag for Niuas (TO-03) 🏴󠁴󠁮󠀷󠀲󠁿 Flag for Tozeur (TN-72) 🏴󠁴󠁮󠀱󠀴󠁿 Flag for Manouba (TN-14) 🏴󠁴󠁮󠀴󠀲󠁿 Flag for Kasserine (TN-42) 🏴󠁴󠁲󠀱󠀴󠁿 Flag for Bolu (TR-14) 🏴󠁴󠁮󠀳󠀴󠁿 Flag for Siliana (TN-34) 🏴󠁴󠁯󠀰󠀵󠁿 Flag for Vavaʻu (TO-05) 🏴󠁴󠁲󠀰󠀶󠁿 Flag for Ankara (TR-06) 🏴󠁴󠁮󠀶󠀱󠁿 Flag for Sfax (TN-61) 🏴󠁴󠁮󠀴󠀳󠁿 Flag for Sidi Bouzid (TN-43) 🏴󠁴󠁮󠀸󠀲󠁿 Flag for Medenine (TN-82) 🏴󠁴󠁮󠀲󠀳󠁿 Flag for Bizerte (TN-23) 🏴󠁴󠁲󠀲󠀴󠁿 Flag for Erzincan (TR-24) 🏴󠁴󠁲󠀴󠀶󠁿 Flag for Kahramanmaraş (TR-46) 🏴󠁴󠁲󠀳󠀶󠁿 Flag for Kars (TR-36) 🏴󠁴󠁲󠀵󠀱󠁿 Flag for Niğde (TR-51) 🏴󠁴󠁲󠀳󠀸󠁿 Flag for Kayseri (TR-38) 🏴󠁴󠁲󠀴󠀱󠁿 Flag for Kocaeli (TR-41) 🏴󠁴󠁲󠀱󠀸󠁿 Flag for Çankırı (TR-18) 🏴󠁴󠁲󠀴󠀸󠁿 Flag for Muğla (TR-48) 🏴󠁴󠁲󠀴󠀲󠁿 Flag for Konya (TR-42) 🏴󠁴󠁲󠀴󠀴󠁿 Flag for Malatya (TR-44) 🏴󠁴󠁲󠀲󠀹󠁿 Flag for Gümüşhane (TR-29) 🏴󠁴󠁲󠀲󠀲󠁿 Flag for Edirne (TR-22) 🏴󠁴󠁲󠀳󠀹󠁿 Flag for Kırklareli (TR-39) 🏴󠁴󠁲󠀲󠀷󠁿 
Flag for Gaziantep (TR-27) 🏴󠁴󠁲󠀵󠀵󠁿 Flag for Samsun (TR-55) 🏴󠁴󠁲󠀲󠀱󠁿 Flag for Diyarbakır (TR-21) 🏴󠁴󠁲󠀱󠀶󠁿 Flag for Bursa (TR-16) 🏴󠁴󠁲󠀱󠀹󠁿 Flag for Çorum (TR-19) 🏴󠁴󠁲󠀵󠀲󠁿 Flag for Ordu (TR-52) 🏴󠁴󠁲󠀴󠀵󠁿 Flag for Manisa (TR-45) 🏴󠁴󠁲󠀲󠀵󠁿 Flag for Erzurum (TR-25) 🏴󠁴󠁲󠀱󠀵󠁿 Flag for Burdur (TR-15) 🏴󠁴󠁲󠀳󠀲󠁿 Flag for Isparta (TR-32) 🏴󠁴󠁲󠀳󠀴󠁿 Flag for Istanbul (TR-34) 🏴󠁴󠁲󠀳󠀰󠁿 Flag for Hakkâri (TR-30) 🏴󠁴󠁲󠀳󠀱󠁿 Flag for Hatay (TR-31) 🏴󠁴󠁲󠀴󠀹󠁿 Flag for Muş (TR-49) 🏴󠁴󠁲󠀳󠀳󠁿 Flag for Mersin (TR-33) 🏴󠁴󠁲󠀵󠀶󠁿 Flag for Siirt (TR-56) 🏴󠁴󠁲󠀵󠀰󠁿 Flag for Nevşehir (TR-50) 🏴󠁴󠁲󠀲󠀳󠁿 Flag for Elazığ (TR-23) 🏴󠁴󠁲󠀲󠀸󠁿 Flag for Giresun (TR-28) 🏴󠁴󠁲󠀲󠀰󠁿 Flag for Denizli (TR-20) 🏴󠁴󠁲󠀴󠀷󠁿 Flag for Mardin (TR-47) 🏴󠁴󠁲󠀳󠀷󠁿 Flag for Kastamonu (TR-37) 🏴󠁴󠁲󠀵󠀴󠁿 Flag for Sakarya (TR-54) 🏴󠁴󠁲󠀴󠀰󠁿 Flag for Kırşehir (TR-40) 🏴󠁴󠁲󠀱󠀷󠁿 Flag for Çanakkale (TR-17) 🏴󠁴󠁲󠀵󠀳󠁿 Flag for Rize (TR-53) 🏴󠁴󠁲󠀲󠀶󠁿 Flag for Eskişehir (TR-26) 🏴󠁴󠁲󠀶󠀵󠁿 Flag for Van (TR-65) 🏴󠁴󠁴󠁰󠁲󠁴󠁿 Flag for Princes Town (TT-PRT) 🏴󠁴󠁴󠁣󠁴󠁴󠁿 Flag for Couva-Tabaquite-Talparo (TT-CTT) 🏴󠁴󠁴󠁴󠁯󠁢󠁿 Flag for Tobago (TT-TOB) 🏴󠁴󠁲󠀶󠀳󠁿 Flag for Şanlıurfa (TR-63) 🏴󠁴󠁴󠁡󠁲󠁩󠁿 Flag for Arima (TT-ARI) 🏴󠁴󠁲󠀶󠀷󠁿 Flag for Zonguldak (TR-67) 🏴󠁴󠁴󠁳󠁩󠁰󠁿 Flag for Siparia (TT-SIP) 🏴󠁴󠁲󠀷󠀵󠁿 Flag for Ardahan (TR-75) 🏴󠁴󠁲󠀷󠀹󠁿 Flag for Kilis (TR-79) 🏴󠁴󠁴󠁰󠁯󠁳󠁿 Flag for Port of Spain (TT-POS) 🏴󠁴󠁲󠀶󠀸󠁿 Flag for Aksaray (TR-68) 🏴󠁴󠁴󠁤󠁭󠁮󠁿 Flag for Diego Martin (TT-DMN) 🏴󠁴󠁲󠀶󠀹󠁿 Flag for Bayburt (TR-69) 🏴󠁴󠁲󠀵󠀹󠁿 Flag for Tekirdağ (TR-59) 🏴󠁴󠁲󠀷󠀲󠁿 Flag for Batman (TR-72) 🏴󠁴󠁴󠁣󠁨󠁡󠁿 Flag for Chaguanas (TT-CHA) 🏴󠁴󠁲󠀸󠀰󠁿 Flag for Osmaniye (TR-80) 🏴󠁴󠁲󠀷󠀷󠁿 Flag for Yalova (TR-77) 🏴󠁴󠁴󠁳󠁪󠁬󠁿 Flag for San Juan-Laventille (TT-SJL) 🏴󠁴󠁲󠀷󠀸󠁿 Flag for Karabük (TR-78) 🏴󠁴󠁲󠀶󠀶󠁿 Flag for Yozgat (TR-66) 🏴󠁴󠁴󠁭󠁲󠁣󠁿 Flag for Mayaro-Rio Claro (TT-MRC) 🏴󠁴󠁲󠀶󠀴󠁿 Flag for Uşak (TR-64) 🏴󠁴󠁲󠀵󠀷󠁿 Flag for Sinop (TR-57) 🏴󠁴󠁴󠁴󠁵󠁰󠁿 Flag for Tunapuna-Piarco (TT-TUP) 🏴󠁴󠁲󠀷󠀴󠁿 Flag for Bartın (TR-74) 🏴󠁴󠁲󠀷󠀱󠁿 Flag for Kırıkkale (TR-71) 🏴󠁴󠁴󠁰󠁥󠁤󠁿 Flag for Penal-Debe (TT-PED) 🏴󠁴󠁲󠀷󠀶󠁿 Flag for Iğdır (TR-76) 🏴󠁴󠁲󠀷󠀳󠁿 Flag for Şırnak (TR-73) 🏴󠁴󠁲󠀶󠀱󠁿 Flag for Trabzon (TR-61) 🏴󠁴󠁴󠁰󠁴󠁦󠁿 Flag for Point Fortin (TT-PTF) 🏴󠁴󠁲󠀶󠀲󠁿 Flag 
for Tunceli (TR-62) 🏴󠁴󠁲󠀶󠀰󠁿 Flag for Tokat (TR-60) 🏴󠁴󠁲󠀷󠀰󠁿 Flag for Karaman (TR-70) 🏴󠁴󠁴󠁳󠁦󠁯󠁿 Flag for San Fernando (TT-SFO) 🏴󠁴󠁲󠀵󠀸󠁿 Flag for Sivas (TR-58) 🏴󠁴󠁺󠀰󠀷󠁿 Flag for Zanzibar North (TZ-07) 🏴󠁴󠁷󠁣󠁨󠁡󠁿 Flag for Changhua (TW-CHA) 🏴󠁴󠁶󠁶󠁡󠁩󠁿 Flag for Vaitupu (TV-VAI) 🏴󠁴󠁷󠁫󠁨󠁨󠁿 Flag for Kaohsiung (TW-KHH) 🏴󠁴󠁺󠀰󠀹󠁿 Flag for Kilimanjaro (TZ-09) 🏴󠁴󠁷󠁫󠁩󠁮󠁿 Flag for Kinmen (TW-KIN) 🏴󠁴󠁷󠁰󠁥󠁮󠁿 Flag for Penghu (TW-PEN) 🏴󠁴󠁷󠁴󠁮󠁮󠁿 Flag for Tainan (TW-TNN) 🏴󠁴󠁶󠁮󠁫󠁦󠁿 Flag for Nukufetau (TV-NKF) 🏴󠁴󠁺󠀰󠀸󠁿 Flag for Kigoma (TZ-08) 🏴󠁴󠁷󠁴󠁰󠁥󠁿 Flag for Taipei (TW-TPE) 🏴󠁴󠁷󠁰󠁩󠁦󠁿 Flag for Pingtung (TW-PIF) 🏴󠁴󠁷󠁩󠁬󠁡󠁿 Flag for Yilan (TW-ILA) 🏴󠁴󠁷󠁴󠁡󠁯󠁿 Flag for Taoyuan (TW-TAO) 🏴󠁴󠁺󠀰󠀳󠁿 Flag for Dodoma (TZ-03) 🏴󠁴󠁶󠁮󠁵󠁩󠁿 Flag for Nui (TV-NUI) 🏴󠁴󠁶󠁮󠁩󠁴󠁿 Flag for Niutao (TV-NIT) 🏴󠁴󠁺󠀰󠀶󠁿 Flag for North Pemba (TZ-06) 🏴󠁴󠁷󠁮󠁷󠁴󠁿 Flag for New Taipei (TW-NWT) 🏴󠁴󠁺󠀰󠀴󠁿 Flag for Iringa (TZ-04) 🏴󠁴󠁺󠀰󠀵󠁿 Flag for Kagera (TZ-05) 🏴󠁴󠁷󠁹󠁵󠁮󠁿 Flag for Yunlin (TW-YUN) 🏴󠁴󠁷󠁬󠁩󠁥󠁿 Flag for Lienchiang (TW-LIE) 🏴󠁴󠁶󠁮󠁭󠁧󠁿 Flag for Nanumanga (TV-NMG) 🏴󠁴󠁺󠀰󠀲󠁿 Flag for Dar es Salaam (TZ-02) 🏴󠁴󠁶󠁮󠁭󠁡󠁿 Flag for Nanumea (TV-NMA) 🏴󠁴󠁷󠁴󠁴󠁴󠁿 Flag for Taitung (TW-TTT) 🏴󠁴󠁷󠁮󠁡󠁮󠁿 Flag for Nantou (TW-NAN) 🏴󠁴󠁷󠁣󠁹󠁱󠁿 Flag for Chiayi (TW-CYQ) 🏴󠁴󠁺󠀰󠀱󠁿 Flag for Arusha (TZ-01) 🏴󠁴󠁷󠁨󠁵󠁡󠁿 Flag for Hualien (TW-HUA) 🏴󠁴󠁷󠁣󠁹󠁩󠁿 Flag for Chiayi County (TW-CYI) 🏴󠁴󠁷󠁴󠁸󠁧󠁿 Flag for Taichung (TW-TXG) 🏴󠁴󠁷󠁫󠁥󠁥󠁿 Flag for Keelung (TW-KEE) 🏴󠁴󠁷󠁭󠁩󠁡󠁿 Flag for Miaoli (TW-MIA) 🏴󠁵󠁡󠀴󠀳󠁿 Flag for Crimea (UA-43) 🏴󠁴󠁺󠀱󠀲󠁿 Flag for Lindi (TZ-12) 🏴󠁴󠁺󠀲󠀶󠁿 Flag for Manyara (TZ-26) 🏴󠁵󠁡󠀰󠀹󠁿 Flag for Luhanshchyna (UA-09) 🏴󠁴󠁺󠀲󠀰󠁿 Flag for Rukwa (TZ-20) 🏴󠁵󠁡󠀱󠀲󠁿 Flag for Dnipropetrovshchyna (UA-12) 🏴󠁵󠁡󠀰󠀷󠁿 Flag for Volyn (UA-07) 🏴󠁴󠁺󠀲󠀲󠁿 Flag for Shinyanga (TZ-22) 🏴󠁵󠁡󠀰󠀵󠁿 Flag for Vinnychchyna (UA-05) 🏴󠁴󠁺󠀲󠀱󠁿 Flag for Ruvuma (TZ-21) 🏴󠁴󠁺󠀲󠀸󠁿 Flag for Katavi (TZ-28) 🏴󠁵󠁡󠀲󠀳󠁿 Flag for Zaporizhzhya (UA-23) 🏴󠁵󠁡󠀳󠀲󠁿 Flag for Kyivshchyna (UA-32) 🏴󠁴󠁺󠀲󠀳󠁿 Flag for Singida (TZ-23) 🏴󠁴󠁺󠀲󠀴󠁿 Flag for Tabora (TZ-24) 🏴󠁴󠁺󠀱󠀳󠁿 Flag for Mara (TZ-13) 🏴󠁴󠁺󠀲󠀷󠁿 Flag for Geita (TZ-27) 🏴󠁴󠁺󠀳󠀰󠁿 Flag for Simiyu (TZ-30) 🏴󠁵󠁡󠀴󠀸󠁿 Flag for Mykolayivschyna (UA-48) 🏴󠁵󠁡󠀳󠀵󠁿 Flag for 
Kirovohradschyna (UA-35) 🏴󠁵󠁡󠀵󠀶󠁿 Flag for Rivnenshchyna (UA-56) 🏴󠁵󠁡󠀵󠀳󠁿 Flag for Poltavshchyna (UA-53) 🏴󠁴󠁺󠀱󠀴󠁿 Flag for Mbeya (TZ-14) 🏴󠁴󠁺󠀱󠀸󠁿 Flag for Mwanza (TZ-18) 🏴󠁵󠁡󠀲󠀱󠁿 Flag for Zakarpattia (UA-21) 🏴󠁴󠁺󠀱󠀰󠁿 Flag for South Pemba (TZ-10) 🏴󠁴󠁺󠀱󠀹󠁿 Flag for Pwani (TZ-19) 🏴󠁴󠁺󠀱󠀷󠁿 Flag for Mtwara (TZ-17) 🏴󠁵󠁡󠀴󠀰󠁿 Flag for Sevastopol (UA-40) 🏴󠁵󠁡󠀵󠀱󠁿 Flag for Odeshchyna (UA-51) 🏴󠁵󠁡󠀴󠀶󠁿 Flag for Lvivshchyna (UA-46) 🏴󠁵󠁡󠀱󠀴󠁿 Flag for Donechchyna (UA-14) 🏴󠁵󠁡󠀲󠀶󠁿 Flag for Prykarpattia (UA-26) 🏴󠁴󠁺󠀱󠀵󠁿 Flag for Zanzibar Urban/West (TZ-15) 🏴󠁴󠁺󠀱󠀶󠁿 Flag for Morogoro (TZ-16) 🏴󠁴󠁺󠀲󠀹󠁿 Flag for Njombe (TZ-29) 🏴󠁵󠁡󠀷󠀷󠁿 Flag for Chernivtsi Oblast (UA-77) 🏴󠁵󠁭󠀹󠀵󠁿 Flag for Palmyra Atoll (UM-95) 🏴󠁵󠁳󠁫󠁳󠁿 Flag for Kansas (US-KS) 👨🏽‍👨🏽‍👦🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁵󠁳󠁡󠁺󠁿 Flag for Arizona (US-AZ) 🏴󠁵󠁭󠀶󠀷󠁿 Flag for Johnston Atoll (UM-67) 🏴󠁵󠁡󠀷󠀴󠁿 Flag for Chernihivshchyna (UA-74) 🏴󠁵󠁭󠀸󠀴󠁿 Flag for Howland Island (UM-84) 🏴󠁵󠁳󠁧󠁡󠁿 Flag for Georgia (US-GA) 🏴󠁵󠁳󠁨󠁩󠁿 Flag for Hawaii (US-HI) 🏴󠁵󠁭󠀷󠀱󠁿 Flag for Midway Atoll (UM-71) 🏴󠁵󠁳󠁡󠁳󠁿 Flag for American Samoa (US-AS) 🏴󠁵󠁳󠁣󠁴󠁿 Flag for Connecticut (US-CT) 🏴󠁵󠁳󠁩󠁡󠁿 Flag for Iowa (US-IA) 🏴󠁵󠁡󠀶󠀱󠁿 Flag for Ternopilshchyna (UA-61) 🏴󠁵󠁧󠁮󠁿 Flag for Northern (UG-N) 🏴󠁵󠁳󠁧󠁵󠁿 Flag for Guam (US-GU) 🏴󠁵󠁭󠀸󠀱󠁿 Flag for Baker Island (UM-81) 🏴󠁵󠁧󠁥󠁿 Flag for Eastern (UG-E) 🏴󠁵󠁡󠀶󠀵󠁿 Flag for Khersonshchyna (UA-65) 🏴󠁵󠁡󠀵󠀹󠁿 Flag for Sumshchyna (UA-59) 🏴󠁵󠁳󠁩󠁮󠁿 Flag for Indiana (US-IN) 🏴󠁵󠁳󠁡󠁲󠁿 Flag for Arkansas (US-AR) 🏴󠁵󠁳󠁤󠁥󠁿 Flag for Delaware (US-DE) 🏴󠁵󠁡󠀶󠀳󠁿 Flag for Kharkivshchyna (UA-63) 🏴󠁵󠁳󠁡󠁬󠁿 Flag for Alabama (US-AL) 🏴󠁵󠁧󠁷󠁿 Flag for Western (UG-W) 🏴󠁵󠁡󠀶󠀸󠁿 Flag for Khmelnychchyna (UA-68) 🏴󠁵󠁭󠀷󠀶󠁿 Flag for Navassa Island (UM-76) 🏴󠁵󠁭󠀸󠀶󠁿 Flag for Jarvis Island (UM-86) 🏴󠁵󠁳󠁩󠁤󠁿 Flag for Idaho (US-ID) 🏴󠁵󠁭󠀸󠀹󠁿 Flag for Kingman Reef (UM-89) 🏴󠁵󠁳󠁦󠁬󠁿 Flag for Florida (US-FL) 🏴󠁵󠁭󠀷󠀹󠁿 Flag for Wake Island (UM-79) 🏴󠁵󠁳󠁩󠁬󠁿 Flag for Illinois (US-IL) 🏴󠁵󠁳󠁤󠁣󠁿 Flag for Washington DC (US-DC) 🏴󠁵󠁡󠀷󠀱󠁿 Flag for Cherkashchyna (UA-71) 🏴󠁵󠁳󠁮󠁹󠁿 Flag for New York (US-NY) 
👨🏾‍👨🏾‍👦🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁵󠁳󠁮󠁣󠁿 Flag for North Carolina (US-NC) 🏴󠁵󠁳󠁭󠁳󠁿 Flag for Mississippi (US-MS) 🏴󠁵󠁳󠁭󠁡󠁿 Flag for Massachusetts (US-MA) 🏴󠁵󠁳󠁮󠁶󠁿 Flag for Nevada (US-NV) 🏴󠁵󠁳󠁷󠁩󠁿 Flag for Wisconsin (US-WI) 🏴󠁵󠁳󠁭󠁤󠁿 Flag for Maryland (US-MD) 🏴󠁵󠁳󠁮󠁭󠁿 Flag for New Mexico (US-NM) 🏴󠁵󠁳󠁰󠁲󠁿 Flag for Puerto Rico (US-PR) 🏴󠁵󠁳󠁵󠁭󠁿 Flag for U.S. Outlying Islands (US-UM) 🏴󠁵󠁳󠁷󠁹󠁿 Flag for Wyoming (US-WY) 🏴󠁵󠁳󠁯󠁨󠁿 Flag for Ohio (US-OH) 🏴󠁵󠁳󠁫󠁹󠁿 Flag for Kentucky (US-KY) 🏴󠁵󠁳󠁮󠁪󠁿 Flag for New Jersey (US-NJ) 🏴󠁵󠁳󠁯󠁲󠁿 Flag for Oregon (US-OR) 🏴󠁵󠁳󠁭󠁩󠁿 Flag for Michigan (US-MI) 🏴󠁵󠁳󠁶󠁩󠁿 Flag for U.S. Virgin Islands (US-VI) 🏴󠁵󠁳󠁭󠁯󠁿 Flag for Missouri (US-MO) 🏴󠁵󠁳󠁰󠁡󠁿 Flag for Pennsylvania (US-PA) 🏴󠁵󠁳󠁶󠁡󠁿 Flag for Virginia (US-VA) 🏴󠁵󠁹󠁡󠁲󠁿 Flag for Artigas (UY-AR) 🏴󠁵󠁹󠁣󠁡󠁿 Flag for Canelones (UY-CA) 🏴󠁵󠁳󠁷󠁡󠁿 Flag for Washington (US-WA) 🏴󠁵󠁳󠁳󠁣󠁿 Flag for South Carolina (US-SC) 🏴󠁵󠁳󠁭󠁥󠁿 Flag for Maine (US-ME) 🏴󠁵󠁳󠁬󠁡󠁿 Flag for Louisiana (US-LA) 🏴󠁵󠁳󠁭󠁮󠁿 Flag for Minnesota (US-MN) 🏴󠁵󠁳󠁲󠁩󠁿 Flag for Rhode Island (US-RI) 🏴󠁵󠁳󠁷󠁶󠁿 Flag for West Virginia (US-WV) 🏴󠁵󠁳󠁴󠁸󠁿 Flag for Texas (US-TX) 🏴󠁵󠁳󠁵󠁴󠁿 Flag for Utah (US-UT) 🏴󠁵󠁳󠁯󠁫󠁿 Flag for Oklahoma (US-OK) 🏴󠁵󠁳󠁮󠁨󠁿 Flag for New Hampshire (US-NH) 🏴󠁵󠁺󠁳󠁡󠁿 Flag for Samarqand (UZ-SA) 🏴󠁵󠁹󠁭󠁡󠁿 Flag for Maldonado (UY-MA) 🏴󠁵󠁺󠁮󠁧󠁿 Flag for Namangan (UZ-NG) 🏴󠁶󠁣󠀰󠀱󠁿 Flag for Charlotte (VC-01) 🏴󠁵󠁹󠁳󠁡󠁿 Flag for Salto (UY-SA) 🏴󠁵󠁹󠁣󠁬󠁿 Flag for Cerro Largo (UY-CL) 🏴󠁵󠁹󠁴󠁡󠁿 Flag for Tacuarembó (UY-TA) 🏴󠁶󠁥󠁡󠁿 Flag for Capital (VE-A) 🏴󠁶󠁥󠁢󠁿 Flag for Anzoátegui (VE-B) 🏴󠁶󠁣󠀰󠀲󠁿 Flag for Saint Andrew (VC-02) 🏴󠁵󠁹󠁳󠁯󠁿 Flag for Soriano (UY-SO) 🏴󠁵󠁹󠁲󠁯󠁿 Flag for Rocha (UY-RO) 🏴󠁶󠁣󠀰󠀳󠁿 Flag for Saint David (VC-03) 🏴󠁵󠁹󠁳󠁪󠁿 Flag for San José (UY-SJ) 🏴󠁵󠁹󠁦󠁤󠁿 Flag for Florida (UY-FD) 🏴󠁵󠁹󠁣󠁯󠁿 Flag for Colonia (UY-CO) 🏴󠁵󠁹󠁦󠁳󠁿 Flag for Flores (UY-FS) 🏴󠁵󠁺󠁸󠁯󠁿 Flag for Xorazm (UZ-XO) 🏴󠁵󠁹󠁤󠁵󠁿 Flag for Durazno (UY-DU) 🏴󠁵󠁺󠁡󠁮󠁿 Flag for Andijan (UZ-AN) 🏴󠁶󠁥󠁤󠁿 Flag for Aragua (VE-D) 🏴󠁵󠁺󠁳󠁩󠁿 Flag for Sirdaryo (UZ-SI) 🏴󠁵󠁹󠁰󠁡󠁿 Flag for Paysandú (UY-PA) 
🏴󠁶󠁣󠀰󠀶󠁿 Flag for Grenadines (VC-06) 🏴󠁵󠁹󠁲󠁶󠁿 Flag for Rivera (UY-RV) 🏴󠁵󠁹󠁬󠁡󠁿 Flag for Lavalleja (UY-LA) 🏴󠁵󠁺󠁳󠁵󠁿 Flag for Surxondaryo (UZ-SU) 🏴󠁵󠁺󠁴󠁯󠁿 Flag for Tashkent Province (UZ-TO) 🏴󠁵󠁺󠁱󠁡󠁿 Flag for Qashqadaryo (UZ-QA) 🏴󠁵󠁹󠁴󠁴󠁿 Flag for Treinta y Tres (UY-TT) 🏴󠁵󠁹󠁭󠁯󠁿 Flag for Montevideo (UY-MO) 🏴󠁵󠁺󠁢󠁵󠁿 Flag for Bukhara (UZ-BU) 🏴󠁵󠁺󠁦󠁡󠁿 Flag for Fergana (UZ-FA) 🏴󠁵󠁺󠁱󠁲󠁿 Flag for Karakalpakstan (UZ-QR) 🏴󠁵󠁺󠁪󠁩󠁿 Flag for Jizzakh (UZ-JI) 🏴󠁵󠁹󠁲󠁮󠁿 Flag for Río Negro (UY-RN) 🏴󠁵󠁺󠁴󠁫󠁿 Flag for Tashkent (UZ-TK) 🏴󠁶󠁣󠀰󠀵󠁿 Flag for Saint Patrick (VC-05) 🏴󠁵󠁺󠁮󠁷󠁿 Flag for Navoiy (UZ-NW) 🏴󠁶󠁥󠁫󠁿 Flag for Lara (VE-K) 🏴󠁶󠁥󠁯󠁿 Flag for Nueva Esparta (VE-O) 🏴󠁶󠁥󠁳󠁿 Flag for Táchira (VE-S) 🏴󠁶󠁥󠁦󠁿 Flag for Bolívar (VE-F) 🏴󠁶󠁮󠀲󠀱󠁿 Flag for Thanh Hóa (VN-21) 🏴󠁶󠁮󠀱󠀴󠁿 Flag for Hòa Bình (VN-14) 🏴󠁶󠁥󠁪󠁿 Flag for Guárico (VE-J) 🏴󠁶󠁥󠁨󠁿 Flag for Cojedes (VE-H) 🏴󠁶󠁮󠀲󠀶󠁿 Flag for Thừa Thiên–Huế (VN-26) 🏴󠁶󠁥󠁰󠁿 Flag for Portuguesa (VE-P) 🏴󠁶󠁮󠀱󠀸󠁿 Flag for Ninh Bình (VN-18) 🏴󠁶󠁥󠁲󠁿 Flag for Sucre (VE-R) 🏴󠁶󠁮󠀰󠀱󠁿 Flag for Lai Châu (VN-01) 🏴󠁶󠁮󠀰󠀹󠁿 Flag for Lạng Sơn (VN-09) 🏴󠁶󠁥󠁭󠁿 Flag for Miranda (VE-M) 🏴󠁶󠁮󠀲󠀴󠁿 Flag for Quảng Bình (VN-24) 🏴󠁶󠁥󠁥󠁿 Flag for Barinas (VE-E) 🏴󠁶󠁥󠁮󠁿 Flag for Monagas (VE-N) 🏴󠁶󠁮󠀲󠀲󠁿 Flag for Nghệ An (VN-22) 🏴󠁶󠁮󠀰󠀲󠁿 Flag for Lào Cai (VN-02) 🏴󠁶󠁮󠀰󠀷󠁿 Flag for Tuyên Quang (VN-07) 🏴󠁶󠁮󠀰󠀵󠁿 Flag for Sơn La (VN-05) 🏴󠁶󠁮󠀲󠀰󠁿 Flag for Thái Bình (VN-20) 🏴󠁶󠁥󠁷󠁿 Flag for Federal Dependencies (VE-W) 🏴󠁶󠁮󠀲󠀹󠁿 Flag for Quảng Ngãi (VN-29) 🏴󠁶󠁥󠁬󠁿 Flag for Mérida (VE-L) 🏴󠁶󠁥󠁩󠁿 Flag for Falcón (VE-I) 🏴󠁶󠁮󠀰󠀴󠁿 Flag for Cao Bằng (VN-04) 🏴󠁶󠁥󠁺󠁿 Flag for Amazonas (VE-Z) 🏴󠁶󠁮󠀰󠀶󠁿 Flag for Yên Bái (VN-06) 🏴󠁶󠁮󠀲󠀳󠁿 Flag for Hà Tĩnh (VN-23) 🏴󠁶󠁮󠀲󠀸󠁿 Flag for Kon Tum (VN-28) 🏴󠁶󠁥󠁸󠁿 Flag for Vargas (VE-X) 🏴󠁶󠁥󠁵󠁿 Flag for Yaracuy (VE-U) 🏴󠁶󠁥󠁴󠁿 Flag for Trujillo (VE-T) 🏴󠁶󠁮󠀱󠀳󠁿 Flag for Quảng Ninh (VN-13) 🏴󠁶󠁮󠀰󠀳󠁿 Flag for Hà Giang (VN-03) 🏴󠁶󠁮󠀲󠀷󠁿 Flag for Quảng Nam (VN-27) 🏴󠁶󠁮󠀵󠀶󠁿 Flag for Bắc Ninh (VN-56) 🏴󠁶󠁮󠀳󠀶󠁿 Flag for Ninh Thuận (VN-36) 🏴󠁶󠁮󠀶󠀹󠁿 Flag for Thái Nguyên (VN-69) 🏴󠁶󠁮󠀶󠀷󠁿 Flag for Nam Định (VN-67) 🏴󠁶󠁮󠀳󠀵󠁿 Flag for Lâm Đồng (VN-35) 🏴󠁶󠁮󠀶󠀱󠁿 Flag for Hải Dương (VN-61) 🏴󠁶󠁮󠀵󠀲󠁿 
Flag for Sóc Trăng (VN-52) 🏴󠁶󠁮󠀷󠀳󠁿 Flag for Hậu Giang (VN-73) 🏴󠁶󠁮󠀷󠀰󠁿 Flag for Vĩnh Phúc (VN-70) 🏴󠁶󠁮󠀵󠀰󠁿 Flag for Bến Tre (VN-50) 🏴󠁶󠁮󠀵󠀳󠁿 Flag for Bắc Kạn (VN-53) 🏴󠁶󠁮󠀵󠀴󠁿 Flag for Bắc Giang (VN-54) 🏴󠁶󠁮󠀳󠀳󠁿 Flag for Đắk Lắk (VN-33) 🏴󠁶󠁮󠀵󠀷󠁿 Flag for Bình Dương (VN-57) 🏴󠁶󠁮󠁤󠁮󠁿 Flag for Da Nang (VN-DN) 🏴󠁶󠁮󠀴󠀶󠁿 Flag for Tiền Giang (VN-46) 🏴󠁶󠁮󠀴󠀳󠁿 Flag for Bà Rịa–Vũng Tàu (VN-43) 🏴󠁶󠁮󠀷󠀱󠁿 Flag for Điện Biên (VN-71) 🏴󠁶󠁮󠀵󠀸󠁿 Flag for Bình Phước (VN-58) 🏴󠁶󠁮󠁣󠁴󠁿 Flag for Can Tho (VN-CT) 🏴󠁶󠁮󠀵󠀵󠁿 Flag for Bạc Liêu (VN-55) 🏴󠁶󠁮󠀳󠀲󠁿 Flag for Phú Yên (VN-32) 🏴󠁶󠁮󠀴󠀴󠁿 Flag for An Giang (VN-44) 🏴󠁶󠁮󠀶󠀳󠁿 Flag for Hà Nam (VN-63) 🏴󠁶󠁮󠀵󠀹󠁿 Flag for Cà Mau (VN-59) 🏴󠁶󠁮󠀴󠀷󠁿 Flag for Kiên Giang (VN-47) 🏴󠁶󠁮󠀳󠀴󠁿 Flag for Khánh Hòa (VN-34) 🏴󠁶󠁮󠀴󠀵󠁿 Flag for Đồng Tháp (VN-45) 🏴󠁶󠁮󠀳󠀹󠁿 Flag for Đồng Nai (VN-39) 🏴󠁶󠁮󠁨󠁮󠁿 Flag for Hanoi (VN-HN) 🏴󠁶󠁮󠀴󠀹󠁿 Flag for Vĩnh Long (VN-49) 🏴󠁶󠁮󠀶󠀸󠁿 Flag for Phú Thọ (VN-68) 🏴󠁶󠁮󠀳󠀷󠁿 Flag for Tây Ninh (VN-37) 🏴󠁶󠁮󠀳󠀰󠁿 Flag for Gia Lai (VN-30) 🏴󠁶󠁮󠀷󠀲󠁿 Flag for Đắk Nông (VN-72) 🏴󠁶󠁮󠀴󠀰󠁿 Flag for Bình Thuận (VN-40) 🏴󠁶󠁮󠀴󠀱󠁿 Flag for Long An (VN-41) 🏴󠁶󠁮󠀳󠀱󠁿 Flag for Bình Định (VN-31) 🏴󠁷󠁦󠁵󠁶󠁿 Flag for Uvea (WF-UV) 🏴󠁹󠁥󠁳󠁤󠁿 Flag for Sa’dah (YE-SD) 🏴󠁹󠁥󠁡󠁢󠁿 Flag for Abyan (YE-AB) 🏴󠁹󠁥󠁨󠁪󠁿 Flag for Hajjah (YE-HJ) 🏴󠁶󠁵󠁭󠁡󠁰󠁿 Flag for Malampa (VU-MAP) 🏴󠁷󠁳󠁡󠁴󠁿 Flag for Atua (WS-AT) 🏴󠁷󠁳󠁶󠁦󠁿 Flag for Va’a-o-Fonoti (WS-VF) 🏴󠁹󠁥󠁨󠁵󠁿 Flag for Al Hudaydah (YE-HU) 🏴󠁷󠁳󠁰󠁡󠁿 Flag for Palauli (WS-PA) 🏴󠁷󠁳󠁳󠁡󠁿 Flag for Satupa’itea (WS-SA) 🏴󠁹󠁥󠁤󠁡󠁿 Flag for Dhale (YE-DA) 🏴󠁭󠁬󠀶󠁿 Flag for Tombouctou (ML-6) 🏴󠁹󠁥󠁲󠁡󠁿 Flag for Raymah (YE-RA) 🏴󠁶󠁵󠁳󠁡󠁭󠁿 Flag for Sanma (VU-SAM) 🏴󠁷󠁦󠁡󠁬󠁿 Flag for Alo (WF-AL) 🏴󠁹󠁥󠁭󠁲󠁿 Flag for Al Mahrah (YE-MR) 👨🏻‍👨🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone 🏴󠁹󠁥󠁡󠁤󠁿 Flag for ’Adan (YE-AD) 🏴󠁹󠁥󠁳󠁨󠁿 Flag for Shabwah (YE-SH) 🏴󠁶󠁵󠁴󠁡󠁥󠁿 Flag for Tafea (VU-TAE) 🏴󠁹󠁥󠁡󠁭󠁿 Flag for Amran (YE-AM) 🏴󠁶󠁵󠁰󠁡󠁭󠁿 Flag for Penama (VU-PAM) 🏴󠁹󠁥󠁭󠁷󠁿 Flag for Al Mahwit (YE-MW) 🏴󠁷󠁳󠁧󠁥󠁿 Flag for Gaga’emauga (WS-GE) 🏴󠁹󠁥󠁨󠁤󠁿 Flag for Hadramaut (YE-HD) 🏴󠁷󠁳󠁡󠁬󠁿 Flag for Aiga-i-le-Tai (WS-AL) 🏴󠁹󠁥󠁭󠁡󠁿 Flag for Ma’rib (YE-MA) 🏴󠁹󠁥󠁢󠁡󠁿 Flag 
for Al Bayda (YE-BA) 🏴󠁶󠁮󠁨󠁰󠁿 Flag for Haiphong (VN-HP) 🏴󠁷󠁳󠁡󠁡󠁿 Flag for A’ana (WS-AA) 🏴󠁷󠁦󠁳󠁧󠁿 Flag for Sigave (WF-SG) 🏴󠁹󠁥󠁬󠁡󠁿 Flag for Lahij (YE-LA) 🏴󠁶󠁵󠁳󠁥󠁥󠁿 Flag for Shefa (VU-SEE) 🏴󠁹󠁥󠁩󠁢󠁿 Flag for Ibb (YE-IB) 🏴󠁶󠁵󠁴󠁯󠁢󠁿 Flag for Torba (VU-TOB) 🏴󠁹󠁥󠁪󠁡󠁿 Flag for Al Jawf (YE-JA) 🏴󠁷󠁳󠁴󠁵󠁿 Flag for Tuamasaga (WS-TU) 🏴󠁹󠁥󠁤󠁨󠁿 Flag for Dhamar (YE-DH) 🏴󠁺󠁡󠁷󠁣󠁿 Flag for Western Cape (ZA-WC) 🏴󠁹󠁥󠁳󠁵󠁿 Flag for Arkhabil Suqutra (YE-SU) 🏴󠁺󠁷󠁭󠁮󠁿 Flag for Matabeleland North (ZW-MN) 🏴󠁺󠁷󠁭󠁥󠁿 Flag for Mashonaland East (ZW-ME) 🏴󠁺󠁭󠀰󠀶󠁿 Flag for North-Western (ZM-06) 🏴󠁹󠁥󠁳󠁮󠁿 Flag for Sana’a (YE-SN) 🏴󠁺󠁡󠁬󠁰󠁿 Flag for Limpopo (ZA-LP) 🏴󠁺󠁭󠀰󠀳󠁿 Flag for Eastern (ZM-03) 🏴󠁺󠁷󠁭󠁩󠁿 Flag for Midlands (ZW-MI) 🏴󠁺󠁷󠁢󠁵󠁿 Flag for Bulawayo (ZW-BU) 🏴󠁺󠁭󠀰󠀵󠁿 Flag for Northern (ZM-05) 🏴󠁺󠁭󠀰󠀷󠁿 Flag for Southern (ZM-07) 🏴󠁺󠁡󠁦󠁳󠁿 Flag for Free (ZA-FS) 🏴󠁺󠁷󠁭󠁳󠁿 Flag for Matabeleland South (ZW-MS) 🏴󠁺󠁡󠁥󠁣󠁿 Flag for Eastern Cape (ZA-EC) 🏴󠁺󠁭󠀰󠀱󠁿 Flag for Western (ZM-01) 👨🏼‍👨🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁺󠁭󠀰󠀸󠁿 Flag for Copperbelt (ZM-08) 🏴󠁺󠁡󠁮󠁷󠁿 Flag for North West (ZA-NW) 🏴󠁺󠁭󠀱󠀰󠁿 Flag for Muchinga (ZM-10) 🏴󠁺󠁡󠁧󠁴󠁿 Flag for Gauteng (ZA-GT) 🏴󠁺󠁭󠀰󠀹󠁿 Flag for Lusaka (ZM-09) 🏴󠁺󠁭󠀰󠀲󠁿 Flag for Central (ZM-02) 🏴󠁺󠁡󠁮󠁣󠁿 Flag for Northern Cape (ZA-NC) 🏴󠁺󠁡󠁭󠁰󠁿 Flag for Mpumalanga (ZA-MP) 🏴󠁹󠁥󠁴󠁡󠁿 Flag for Taiz (YE-TA) 🏴󠁺󠁡󠁮󠁬󠁿 Flag for KwaZulu-Natal (ZA-NL) 🏴󠁺󠁷󠁭󠁡󠁿 Flag for Manicaland (ZW-MA) 🏴󠁺󠁷󠁭󠁶󠁿 Flag for Masvingo (ZW-MV) 🏴󠁺󠁭󠀰󠀴󠁿 Flag for Luapula (ZM-04) 🏴󠁺󠁷󠁭󠁷󠁿 Flag for Mashonaland West (ZW-MW) 🏴󠁺󠁷󠁨󠁡󠁿 Flag for Harare (ZW-HA) 👨🏽‍👨🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone 👨🏾‍👨🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁦󠁲󠁰󠁤󠁬󠁿 Flag for Pays-de-la-Loire (FR-PDL) 🏴󠁬󠁴󠀲󠀰󠁿 Flag for Klaipėdos Municipality (LT-20) 🏴󠁧󠁲󠁭󠁿 Flag for Crete (GR-M) 󠁸 Tag Latin Small Letter X 🏴󠁩󠁲󠀲󠀱󠁿 Flag for Mazandaran (IR-21) 🏴󠁲󠁵󠁰󠁲󠁩󠁿 Flag for Primorsky Krai (RU-PRI) 🏴󠁪󠁰󠀰󠀷󠁿 Flag for Fukushima (JP-07) 🏴󠁣󠁡󠁭󠁢󠁿 Flag for Manitoba (CA-MB) 
👨🏻‍👨🏻‍👦🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 👩🏻‍❤️‍👩🏻 Couple With Heart - Woman: Light Skin Tone, Woman: Light Skin Tone 🏴󠁣󠁡󠁱󠁣󠁿 Flag for Quebec (CA-QC) 👨‍👩‍👶 Family: Man, Woman, Baby 🏴󠁮󠁡󠁫󠁥󠁿 Flag for Kavango East (NA-KE) 🏴󠁭󠁸󠁳󠁬󠁰󠁿 Flag for San Luis Potosí (MX-SLP) 🏴󠁥󠁥󠀵󠀹󠁿 Flag for Lääne-Viru (EE-59) 🏴󠁬󠁲󠁢󠁧󠁿 Flag for Bong (LR-BG) 🏴󠁰󠁳󠁤󠁥󠁢󠁿 Flag for Deir al-Balah (PS-DEB) 👨🏿‍👨🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁪󠁭󠀰󠀳󠁿 Flag for Saint Thomas (JM-03) 🏴󠁰󠁷󠀱󠀰󠀰󠁿 Flag for Kayangel (PW-100) 🏴󠁣󠁧󠀱󠀲󠁿 Flag for Pool (CG-12) 👨‍❤️‍👨🏾 Couple With Heart - Man, Man: Medium-Dark Skin Tone 🏴󠁥󠁳󠁩󠁢󠁿 Flag for Balearic Islands (ES-IB) 👩‍👨‍👦 Family: Woman, Man, Boy 🏴󠁦󠁩󠀱󠀸󠁿 Flag for Uusimaa (FI-18) 👨🏻‍👩🏻‍👦🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁲󠁣󠁥󠁿 Flag for Ceará (BR-CE) 👨‍👩‍👦‍👶 Family: Man, Woman, Boy, Baby 👨🏻‍👨🏻‍👧🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁭󠁫󠀲󠀵󠁿 Flag for Demir Hisar (MK-25) 🏴󠁣󠁬󠁡󠁮󠁿 Flag for Antofagasta (CL-AN) 🏴󠁢󠁢󠀰󠀱󠁿 Flag for Christ Church (BB-01) 🏴󠁥󠁥󠀳󠀷󠁿 Flag for Harju (EE-37) 👨🏿‍❤️‍💋‍👩🏽 Kiss - Man: Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁮󠁲󠀱󠀴󠁿 Flag for Yaren (NR-14) 👩‍❤️‍👩🏻 Couple With Heart - Woman, Woman: Light Skin Tone 🏴󠁭󠁹󠀱󠀰󠁿 Flag for Selangor (MY-10) 👨🏼‍👨🏼‍👧🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁰󠁥󠁡󠁰󠁵󠁿 Flag for Apurímac (PE-APU) 👩‍👨‍👦‍👧 Family: Woman, Man, Boy, Girl 👨🏿‍👩🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁧󠁥󠁡󠁢󠁿 Flag for Abkhazia (GE-AB) 🏴󠁬󠁩󠀰󠀸󠁿 Flag for Schellenberg (LI-08) 🏴󠁴󠁲󠀸󠀱󠁿 Flag for Düzce (TR-81) 👩🏾‍👧🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩‍👨‍👶‍👦 Family: Woman, Man, Baby, Boy 🏴󠁭󠁸󠁳󠁯󠁮󠁿 Flag for Sonora (MX-SON) 🏴󠁣󠁩󠁳󠁭󠁿 Flag for Sassandra-Marahoué (CI-SM) 
🏴󠁰󠁥󠁡󠁲󠁥󠁿 Flag for Arequipa (PE-ARE) 👩🏽‍❤️‍👩🏼 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁣󠁧󠀱󠀱󠁿 Flag for Bouenza (CG-11) 🏴󠁪󠁭󠀱󠀴󠁿 Flag for Saint Catherine (JM-14) 🏴󠁳󠁩󠀱󠀲󠀲󠁿 Flag for Škofja Loka (SI-122) 👩🏻‍❤️‍💋‍👨🏼 Kiss - Woman: Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁴󠁷󠁨󠁳󠁺󠁿 Flag for Hsinchu (TW-HSZ) 👩🏼‍👧🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁬󠁫󠀳󠁿 Flag for Southern (LK-3) 👨‍❤️‍💋‍👨🏼 Kiss - Man, Man: Medium-Light Skin Tone 👨🏽‍👨🏽‍👧🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁮󠁩󠁬󠁥󠁿 Flag for León (NI-LE) 🏴󠁨󠁲󠀰󠀵󠁿 Flag for Varaždin (HR-05) 🏴󠁣󠁯󠁡󠁮󠁴󠁿 Flag for Antioquia (CO-ANT) 🏴󠁭󠁣󠁳󠁤󠁿 Flag for Sainte-Dévote Chapel (MC-SD) 🏴󠁭󠁫󠀶󠀱󠁿 Flag for Plasnica (MK-61) 👨🏾‍❤️‍👨🏻 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Light Skin Tone 🏴󠁧󠁲󠁧󠁿 Flag for West Greece (GR-G) 🏴󠁭󠁶󠁮󠁯󠁿 Flag for North Province (MV-NO) 👨‍❤️‍👩🏻 Couple With Heart - Man, Woman: Light Skin Tone 🏴󠁶󠁥󠁣󠁿 Flag for Apure (VE-C) ☿️ Mercury 🏴󠁵󠁳󠁭󠁴󠁿 Flag for Montana (US-MT) 👩🏼‍❤️‍👨🏾 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏾‍👨🏾‍👧🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁥󠁣󠁥󠁿 Flag for Esmeraldas (EC-E) 🏴󠁤󠁺󠀰󠀸󠁿 Flag for Béchar (DZ-08) 🏴󠁮󠁬󠁮󠁨󠁿 Flag for North Holland (NL-NH) 🏴󠁦󠁲󠁢󠁬󠁿 Flag for St. 
Barthélemy (FR-BL) 🏴󠁣󠁦󠁵󠁫󠁿 Flag for Ouaka (CF-UK) 🏴󠁳󠁤󠁲󠁳󠁿 Flag for Red Sea (SD-RS) 🏴󠁭󠁸󠁴󠁡󠁢󠁿 Flag for Tabasco (MX-TAB) 🏴󠁣󠁮󠀹󠀲󠁿 Flag for Macau SAR China (CN-92) 🏴󠁨󠁵󠁥󠁧󠁿 Flag for Eger (HU-EG) 🏴󠁲󠁵󠁳󠁥󠁿 Flag for North Ossetia-Alania (RU-SE) 🏴󠁣󠁤󠁥󠁱󠁿 Flag for Équateur (CD-EQ) 👨🏿‍👨🏿‍👧🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁥󠁳󠁰󠁶󠁿 Flag for Basque Country (ES-PV) 👨🏽‍❤️‍💋‍👨🏻 Kiss - Man: Medium Skin Tone, Man: Light Skin Tone 🏴󠁴󠁮󠀷󠀱󠁿 Flag for Gafsa (TN-71) 🏴󠁦󠁩󠀰󠀶󠁿 Flag for Tavastia Proper (FI-06) 🏴󠁩󠁲󠀳󠀰󠁿 Flag for Razavi Khorasan (IR-30) 🏴󠁳󠁩󠀱󠀵󠀴󠁿 Flag for Dobje (SI-154) 👨🏼‍❤️‍💋‍👨🏻 Kiss - Man: Medium-Light Skin Tone, Man: Light Skin Tone 🏴󠁧󠁴󠁲󠁥󠁿 Flag for Retalhuleu (GT-RE) 🏴󠁫󠁩󠁬󠁿 Flag for Line Islands (KI-L) 🏴󠁩󠁲󠀰󠀲󠁿 Flag for West Azarbaijan (IR-02) 🏴󠁣󠁯󠁮󠁡󠁲󠁿 Flag for Nariño (CO-NAR) 🏴󠁺󠁷󠁭󠁣󠁿 Flag for Mashonaland Central (ZW-MC) 👨🏻‍❤️‍👨🏻 Couple With Heart - Man: Light Skin Tone, Man: Light Skin Tone 🏴󠁩󠁴󠀴󠀵󠁿 Flag for Emilia-Romagna (IT-45) 🏴󠁥󠁳󠁶󠁣󠁿 Flag for Valencian Community (ES-VC) 🏴󠁴󠁨󠀷󠀵󠁿 Flag for Samut Songkhram (TH-75) 🏴󠁦󠁲󠁩󠁤󠁦󠁿 Flag for Île-de-France (FR-IDF) 🏴󠁬󠁳󠁡󠁿 Flag for Maseru (LS-A) 🏴󠁫󠁥󠀲󠀵󠁿 Flag for Marsabit (KE-25) 🏴󠁤󠁺󠀰󠀱󠁿 Flag for Adrar (DZ-01) 🏴󠁳󠁶󠁵󠁳󠁿 Flag for Usulután (SV-US) 🏴󠁬󠁶󠀰󠀶󠀰󠁿 Flag for Mazsalaca (LV-060) 👩🏻‍❤️‍💋‍👩🏾 Kiss - Woman: Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏾‍👦🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁴󠁨󠀳󠀶󠁿 Flag for Chaiyaphum (TH-36) 🏴󠁰󠁨󠀰󠀷󠁿 Flag for Central Visayas (PH-07) 🏴󠁴󠁨󠀸󠀶󠁿 Flag for Chumphon (TH-86) 🏴󠁣󠁩󠁺󠁺󠁿 Flag for Zanzan (CI-ZZ) 🏴󠁥󠁳󠁣󠁬󠁿 Flag for Castile and León (ES-CL) 👨🏻‍👨🏻‍👧🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 🏴󠁳󠁡󠀱󠀱󠁿 Flag for Al Bahah (SA-11) 🏴󠁢󠁱󠁳󠁥󠁿 Flag for Sint Eustatius (BQ-SE) 🏴󠁦󠁩󠀰󠀱󠁿 Flag for Åland Islands (FI-01) 🏴󠁣󠁲󠁨󠁿 Flag for Heredia (CR-H) 🏴󠁴󠁲󠀴󠀳󠁿 Flag for Kütahya (TR-43) 🏴󠁷󠁳󠁶󠁳󠁿 Flag for Vaisigano (WS-VS) 👨🏿‍❤️‍💋‍👩🏼 Kiss - Man: Dark Skin Tone, Woman: Medium-Light Skin Tone 
🏴󠁳󠁩󠀰󠀵󠀲󠁿 Flag for Kranj (SI-052) 🏴󠁶󠁥󠁶󠁿 Flag for Zulia (VE-V) 👩🏽‍❤️‍💋‍👨🏼 Kiss - Woman: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁬󠁵󠁣󠁡󠁿 Flag for Capellen (LU-CA) 👩🏽‍❤️‍👩🏾 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium-Dark Skin Tone 👨🏼‍👨🏼‍👧🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁧󠁹󠁥󠁢󠁿 Flag for East Berbice-Corentyne (GY-EB) 🏴󠁴󠁨󠀱󠀶󠁿 Flag for Lopburi (TH-16) 🏴󠁭󠁴󠀲󠀵󠁿 Flag for Luqa (MT-25) 👨🏻‍❤️‍👨🏼 Couple With Heart - Man: Light Skin Tone, Man: Medium-Light Skin Tone 👨🏽‍👨🏽‍👧🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩🏻‍❤️‍👩🏽 Couple With Heart - Woman: Light Skin Tone, Woman: Medium Skin Tone 🏴󠁭󠁸󠁢󠁣󠁳󠁿 Flag for Baja California Sur (MX-BCS) 🏴󠁥󠁧󠁢󠁮󠁳󠁿 Flag for Beni Suef (EG-BNS) 🏴󠁴󠁨󠀹󠀳󠁿 Flag for Phatthalung (TH-93) 🏴󠁴󠁺󠀲󠀵󠁿 Flag for Tanga (TZ-25) 🏴󠁭󠁡󠀰󠀴󠁿 Flag for Oriental (MA-04) 👨🏾‍👨🏾‍👧🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏿‍👩🏿‍👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁳󠁩󠀰󠀲󠀷󠁿 Flag for Gorenja Vas–Poljane (SI-027) 🏴󠁴󠁴󠁳󠁧󠁥󠁿 Flag for Sangre Grande (TT-SGE) 🏴󠁬󠁶󠀰󠀴󠀶󠁿 Flag for Koknese (LV-046) 🏴󠁳󠁩󠀰󠀸󠀶󠁿 Flag for Odranci (SI-086) 🏴󠁮󠁺󠁮󠁳󠁮󠁿 Flag for Nelson (NZ-NSN) 🏴󠁨󠁵󠁳󠁺󠁿 Flag for Szabolcs-Szatmár-Bereg (HU-SZ) 👩🏾‍❤️‍💋‍👨🏽 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium Skin Tone 🏴󠁳󠁩󠀲󠀱󠀰󠁿 Flag for Sveti Jurij v Slovenskih Goricah (SI-210) ߷ NKo Symbol Gbakurunen 🏴󠁮󠁧󠁤󠁥󠁿 Flag for Delta (NG-DE) 🏴󠁭󠁤󠁣󠁳󠁿 Flag for Căușeni (MD-CS) 👩🏽‍👧🏽‍👦🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁣󠁵󠀹󠀹󠁿 Flag for Isla de la Juventud (CU-99) 🏴󠁫󠁨󠀲󠀰󠁿 Flag for Svay Rieng (KH-20) 🏴󠁴󠁤󠁨󠁬󠁿 Flag for Hadjer-Lamis (TD-HL) 🏴󠁪󠁰󠀲󠀱󠁿 Flag for Gifu (JP-21) 🏴󠁬󠁶󠀰󠀴󠀱󠁿 Flag for Jelgava Municipality (LV-041) 🏴󠁰󠁫󠁴󠁡󠁿 Flag for Federally Administered Tribal Areas (PK-TA) 🏴󠁭󠁴󠀶󠀲󠁿 Flag for Xewkija (MT-62) 🏴󠁭󠁲󠀱󠀰󠁿 Flag for 
Guidimaka (MR-10) 🏴󠁭󠁫󠀰󠀲󠁿 Flag for Aračinovo (MK-02) 🏴󠁳󠁩󠀲󠀰󠀸󠁿 Flag for Log–Dragomer (SI-208) 🏴󠁳󠁩󠀱󠀲󠀵󠁿 Flag for Šmartno ob Paki (SI-125) 🏴󠁣󠁯󠁤󠁣󠁿 Flag for Capital District (CO-DC) 🏴󠁬󠁶󠀱󠀰󠀶󠁿 Flag for Ventspils Municipality (LV-106) 🏴󠁭󠁶󠁳󠁣󠁿 Flag for South Central Province (MV-SC) 🏴󠁩󠁮󠁡󠁳󠁿 Flag for Assam (IN-AS) 🏴󠁬󠁴󠀰󠀲󠁿 Flag for Alytus Municipality (LT-02) 🏴󠁶󠁮󠀶󠀶󠁿 Flag for Hưng Yên (VN-66) 👨🏻‍👨🏻‍👧🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👨🏼‍👨🏼‍👧🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁧󠁴󠁳󠁭󠁿 Flag for San Marcos (GT-SM) 👨🏼‍👨🏼‍👦🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁤󠁥󠁳󠁨󠁿 Flag for Schleswig-Holstein (DE-SH) 👨‍👨‍👶‍👧 Family: Man, Man, Baby, Girl ️ Variation Selector-16 👨🏽‍👨🏽‍👧🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👨🏾‍👧🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👧🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👨🏻‍👨🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone 👨🏼‍👨🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏽‍👨🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👨🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone 👩‍❤️‍👨🏿 Couple With Heart - Woman, Man: Dark Skin Tone 🏴󠁥󠁳󠁣󠁢󠁿 Flag for Cantabria (ES-CB) 🏴󠁳󠁳󠁵󠁹󠁿 Flag for Unity (SS-UY) 👩🏼‍👶🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👶🏽‍👦🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin 
Tone, Boy: Medium Skin Tone 👩🏾‍👶🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👶🏿‍👦🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👶🏻‍👧🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👶🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍👶🏽‍👧🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👶🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏽‍👶🏽‍👶🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👶🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👶🏿‍👶🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👨🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👨🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👨🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👨🏻‍👦🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👶🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏼‍👨🏼‍👦🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👨🏽‍👦🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👦🏿‍👦🏿 Family - Woman: Dark Skin Tone, 
Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 👩🏽‍👨🏽‍👦🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👦🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 👩🏼‍👨🏼‍👦🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👨🏽‍👦🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏻‍👨🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👨🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone 👩🏻‍👨🏻‍👦🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👦🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏼‍👨🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏻‍👨🏻‍👧🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👨🏼‍👧🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👨🏽‍👧🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👨🏾‍👧🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👧🏿‍👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👨🏻‍👧🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: 
Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👨🏽‍👧🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩🏿‍👨🏿‍👧🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👨🏻‍👧🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👧🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👨🏽‍👧🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👧🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👧🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👨🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏻‍👨🏻‍👶🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👨🏼‍👶🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏾‍👨🏾‍👶🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👶🏿‍👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👨🏻‍👶🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👨🏼‍👶🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍👨🏽‍👶🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏿‍👨🏿‍👶🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: 
Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👨🏻‍👶🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👶🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👨🏽‍👶🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👶🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👶🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 👩🏼‍👩🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏻‍👩🏻‍👦🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👩🏼‍👦🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏾‍👩🏾‍👦🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👦🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 👩🏿‍👩🏿‍👦🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 👩🏽‍👩🏽‍👦🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👩🏾‍👦🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👦🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👦🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👩🏼‍👦🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin 
Tone 👩🏽‍👩🏽‍👦🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👦🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏻‍👩🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👩🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍👩🏽‍👦🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏽‍👩🏽‍👧🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏻‍👩🏻‍👧🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👩🏽‍👧🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👩🏾‍👧🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👧🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👧🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👩🏼‍👧🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👩🏽‍👧🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👧🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👧🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone 👨🏾‍👩🏾‍👧🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: 
Medium-Dark Skin Tone 👩🏼‍👩🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👩🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👩🏼‍👶🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👩🏽‍👶🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👩🏽‍👶🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👩🏼‍👶🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👩🏽‍👶🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: 
Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 👩🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁩󠁤󠁭󠁬󠁿 Flag for Maluku Islands (ID-ML) 👩🏿‍👶🏿‍👧🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁤󠁫󠀸󠀳󠁿 Flag for Southern Denmark (DK-83) 🏴󠁭󠁫󠀸󠀵󠁿 Flag for Skopje (MK-85) 👨🏼‍❤️‍💋‍👩 Kiss - Man: Medium-Light Skin Tone, Woman 🏴󠁰󠁴󠀰󠀲󠁿 Flag for Beja (PT-02) 🏴󠁩󠁴󠀸󠀸󠁿 Flag for Sardinia (IT-88) 🏴󠁤󠁥󠁢󠁹󠁿 Flag for Bavaria (DE-BY) 🏴󠁰󠁧󠁥󠁢󠁲󠁿 Flag for East New Britain (PG-EBR) 🏴󠁩󠁴󠀳󠀲󠁿 Flag for Trentino-South Tyrol (IT-32) 🏴󠁵󠁳󠁴󠁮󠁿 Flag for Tennessee (US-TN) 🏴󠁣󠁡󠁳󠁫󠁿 Flag for Saskatchewan (CA-SK) 🏴󠁴󠁶󠁦󠁵󠁮󠁿 Flag for Funafuti (TV-FUN) 🏴󠁴󠁪󠁧󠁢󠁿 Flag for Gorno-Badakhshan (TJ-GB) 🏴󠁳󠁯󠁢󠁮󠁿 Flag for Banaadir (SO-BN) 🏴󠁳󠁩󠀱󠀰󠀰󠁿 Flag for Radenci (SI-100) 🏴󠁤󠁥󠁢󠁷󠁿 Flag for Baden-Württemberg (DE-BW) 👩🏿‍👧🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁶󠁥󠁧󠁿 Flag for Carabobo (VE-G) ‍ Zero Width Joiner 🏴󠁫󠁥󠀳󠀱󠁿 Flag for Nakuru (KE-31) 🏴󠁴󠁧󠁭󠁿 Flag for Maritime (TG-M) 🏴󠁮󠁧󠁢󠁯󠁿 Flag for Borno (NG-BO) 🏴󠁭󠁤󠁳󠁮󠁿 Flag for Transnistria (MD-SN) 🏴󠁩󠁲󠀰󠀷󠁿 Flag for Tehran (IR-07) 🏴󠁲󠁵󠁤󠁡󠁿 Flag for Dagestan (RU-DA) 🏴󠁯󠁭󠁷󠁵󠁿 Flag for Al Wusta (OM-WU) 🏴󠁣󠁺󠀴󠀲󠁿 Flag for Ústecký kraj (CZ-42) 🏴󠁭󠁹󠀱󠀴󠁿 Flag for Kuala Lumpur (MY-14) 🏴󠁰󠁥󠁡󠁹󠁡󠁿 Flag for Ayacucho (PE-AYA) 🏴󠁵󠁡󠀳󠀰󠁿 Flag for Kiev (UA-30) 🏴󠁡󠁧󠀰󠀸󠁿 Flag for Saint Philip (AG-08) 🏴󠁭󠁴󠀲󠀹󠁿 Flag for Mdina (MT-29) 🏴󠁧󠁢󠁮󠁩󠁲󠁿 Flag for Northern Ireland (GB-NIR) 🏴󠁦󠁲󠁡󠁲󠁡󠁿 Flag for Auvergne-Rhône-Alpes (FR-ARA) 🏴󠁭󠁸󠁤󠁵󠁲󠁿 Flag for Durango (MX-DUR) 👨🏼‍👩🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁬󠁫󠀵󠁿 Flag for Eastern (LK-5) 🏴󠁮󠁧󠁯󠁧󠁿 Flag for Ogun (NG-OG) 🏴󠁬󠁹󠁪󠁩󠁿 Flag for Jafara (LY-JI) 🏴󠁳󠁥󠁭󠁿 Flag for Skåne (SE-M) 👨🏽‍👩🏽‍👧🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👩🏾‍👧🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark 
Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁢󠁲󠁭󠁳󠁿 Flag for Mato Grosso do Sul (BR-MS) 🏴󠁧󠁴󠁳󠁲󠁿 Flag for Santa Rosa (GT-SR) 👨🏼‍👩🏼‍👧🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁳󠁩󠀱󠀵󠀱󠁿 Flag for Braslovče (SI-151) 🏴󠁰󠁴󠀳󠀰󠁿 Flag for Madeira (PT-30) 🏴󠁳󠁶󠁳󠁶󠁿 Flag for San Vicente (SV-SV) 🏴󠁩󠁲󠀳󠀲󠁿 Flag for Alborz (IR-32) 🏴󠁷󠁳󠁦󠁡󠁿 Flag for Fa’asaleleaga (WS-FA) 👨🏼‍👨🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁣󠁡󠁮󠁬󠁿 Flag for Newfoundland and Labrador (CA-NL) 🏴󠁧󠁲󠁪󠁿 Flag for Peloponnese (GR-J) 🏴󠁮󠁬󠁳󠁸󠁿 Flag for Sint Maarten (NL-SX) 🏴󠁭󠁴󠀴󠀸󠁿 Flag for St. Julian’s (MT-48) 🏴󠁮󠁧󠁡󠁤󠁿 Flag for Adamawa (NG-AD) 👩🏿‍👩🏿‍👧🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁳󠁴󠁳󠁿 Flag for São Tomé (ST-S) 👩🏻‍👩🏻‍👧🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁬󠁶󠀰󠀱󠀰󠁿 Flag for Auce (LV-010) 🏴󠁰󠁨󠀱󠀵󠁿 Flag for Cordillera Administrative (PH-15) 🏴󠁪󠁰󠀱󠀸󠁿 Flag for Fukui (JP-18) 👨🏿‍👩🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁧󠁥󠁫󠁡󠁿 Flag for Kakheti (GE-KA) 🏴󠁫󠁲󠀴󠀹󠁿 Flag for Jeju (KR-49) 🏴󠁭󠁡󠀱󠀳󠁿 Flag for Souss-Massa-Drâa (MA-13) 🏴󠁬󠁶󠀰󠀳󠀷󠁿 Flag for Inčukalns (LV-037) 🏴󠁦󠁲󠁴󠁦󠁿 Flag for French Southern Territories (FR-TF) 🏴󠁭󠁸󠁲󠁯󠁯󠁿 Flag for Quintana Roo (MX-ROO) 👩🏻‍👶🏻‍👶🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 👨🏾‍👨🏾‍👦🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁨󠁵󠁧󠁳󠁿 Flag for Győr-Moson-Sopron (HU-GS) 👩🏿‍👩🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👦🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone  Shibuya 👩‍❤️‍👨🏽 Couple With Heart - Woman, Man: Medium Skin Tone 🏴󠁷󠁳󠁧󠁩󠁿 Flag for Gaga’ifomauga (WS-GI) 🏴󠁨󠁴󠁮󠁥󠁿 Flag for Nord-Est (HT-NE) 
🏴󠁳󠁧󠀰󠀱󠁿 Flag for Central Singapore (SG-01) 🏴󠁥󠁣󠁴󠁿 Flag for Tungurahua (EC-T) # Number Sign 👨🏻‍👨🏻‍👶🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 1 Digit One 🏴󠁢󠁯󠁴󠁿 Flag for Tarija (BO-T) 👨🏾‍👩🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁢󠁩󠁣󠁩󠁿 Flag for Cibitoke (BI-CI) 🏴󠁭󠁶󠁵󠁳󠁿 Flag for Upper South Province (MV-US) 🏴󠁡󠁤󠀰󠀲󠁿 Flag for Canillo (AD-02) 🏴󠁡󠁦󠁢󠁡󠁭󠁿 Flag for Bamyan (AF-BAM) 🏴󠁡󠁤󠀰󠀳󠁿 Flag for Encamp (AD-03) 🏴󠁵󠁳󠁭󠁰󠁿 Flag for Northern Mariana Islands (US-MP) 🏴󠁬󠁶󠀰󠀱󠀲󠁿 Flag for Babīte (LV-012) 🏴󠁥󠁣󠁸󠁿 Flag for Cotopaxi (EC-X) 🏴󠁧󠁡󠀴󠁿 Flag for Ngounié (GA-4) * Asterisk 󠁺 Tag Latin Small Letter Z 🏴󠁡󠁤󠀰󠀴󠁿 Flag for La Massana (AD-04) 󠀳 Tag Digit Three 👩🏼‍❤️‍💋‍👩🏻 Kiss - Woman: Medium-Light Skin Tone, Woman: Light Skin Tone 🏴󠁭󠁥󠀰󠀳󠁿 Flag for Berane (ME-03) 👨🏿‍❤️‍💋‍👨🏽 Kiss - Man: Dark Skin Tone, Man: Medium Skin Tone 🏴󠁤󠁯󠀳󠀷󠁿 Flag for El Valle (DO-37) 👩🏾‍❤️‍👩🏻 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Light Skin Tone 🏴󠁫󠁥󠀰󠀱󠁿 Flag for Baringo (KE-01) 🏴󠁹󠁥󠁳󠁡󠁿 Flag for Amanat Al Asimah (YE-SA) 👨🏼‍👨🏼‍👶🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 󠀲 Tag Digit Two 🏴󠁭󠁴󠀲󠀰󠁿 Flag for Senglea (MT-20) 🕴️‍♀️ Woman in Business Suit Levitating 🏴󠁣󠁦󠁨󠁭󠁿 Flag for Haut-Mbomou (CF-HM) 󠀱 Tag Digit One 󠀴 Tag Digit Four 🏴󠁡󠁺󠁡󠁢󠁳󠁿 Flag for Absheron (AZ-ABS) 6 Digit Six 🏴󠁬󠁡󠁳󠁶󠁿 Flag for Savannakhet (LA-SV) 🏴󠁭󠁬󠀱󠁿 Flag for Kayes (ML-1) 🏴󠁡󠁥󠁡󠁺󠁿 Flag for Abu Dhabi (AE-AZ) 🏴󠁥󠁳󠁡󠁳󠁿 Flag for Asturias (ES-AS) 🏴󠁩󠁱󠁫󠁩󠁿 Flag for Kirkuk (IQ-KI) 👩‍❤️‍👩🏽 Couple With Heart - Woman, Woman: Medium Skin Tone 🏴󠁤󠁥󠁢󠁥󠁿 Flag for Berlin (DE-BE) 8 Digit Eight 🏴󠁡󠁤󠀰󠀸󠁿 Flag for Escaldes-Engordany (AD-08) 🏴󠁣󠁮󠀶󠀴󠁿 Flag for Ningxia (CN-64) 🏴󠁥󠁣󠁦󠁿 Flag for Cañar (EC-F) 🏴󠁡󠁥󠁡󠁪󠁿 Flag for Ajman (AE-AJ) 🕴🏻‍♀️ Woman in Business Suit Levitating: Light Skin Tone 👨🏻‍❤️‍💋‍👩 Kiss - Man: Light Skin Tone, Woman 󠀸 Tag Digit Eight 🏴󠁩󠁲󠀱󠀴󠁿 Flag for Fars (IR-14) 🏴󠁡󠁥󠁦󠁵󠁿 
Flag for Fujairah (AE-FU) 👨🏼‍👦🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁨󠁲󠀱󠀰󠁿 Flag for Virovitica-Podravina (HR-10) 󠁩 Tag Latin Small Letter I 7 Digit Seven 󠀷 Tag Digit Seven 󠁥 Tag Latin Small Letter E 👩🏼‍👩🏼‍👧🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁭󠁨󠁴󠁿 Flag for Ratak Chain (MH-T) 🏴󠁡󠁥󠁳󠁨󠁿 Flag for Sharjah (AE-SH) 󠁦 Tag Latin Small Letter F 🏴󠁬󠁴󠀵󠀷󠁿 Flag for Vilniaus Municipality (LT-57) 🏴󠁩󠁳󠀴󠁿 Flag for Westfjords (IS-4) 🏴󠁣󠁡󠁢󠁣󠁿 Flag for British Columbia (CA-BC) 4 Digit Four 🏴󠁡󠁦󠁢󠁡󠁬󠁿 Flag for Balkh (AF-BAL) 👨‍👶‍👦 Family: Man, Baby, Boy 🏴󠁴󠁷󠁨󠁳󠁱󠁿 Flag for Hsinchu County (TW-HSQ) 👩‍👶‍👧 Family: Woman, Baby, Girl 🏴󠁭󠁸󠁪󠁡󠁬󠁿 Flag for Jalisco (MX-JAL) 🏴󠁫󠁥󠀱󠀸󠁿 Flag for Kitui (KE-18) 🏴󠁰󠁴󠀲󠀰󠁿 Flag for Azores (PT-20) 🏴󠁩󠁮󠁭󠁮󠁿 Flag for Manipur (IN-MN) 🏴󠁡󠁦󠁢󠁤󠁳󠁿 Flag for Badakhshan (AF-BDS) 👩🏻‍❤️‍👩🏼 Couple With Heart - Woman: Light Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁡󠁤󠀰󠀵󠁿 Flag for Ordino (AD-05) 👩🏽‍❤️‍💋‍👩 Kiss - Woman: Medium Skin Tone, Woman 🏴󠁡󠁦󠁢󠁧󠁬󠁿 Flag for Baghlan (AF-BGL) 🏴󠁮󠁧󠁣󠁲󠁿 Flag for Cross River (NG-CR) 🏴󠁵󠁳󠁣󠁯󠁿 Flag for Colorado (US-CO) 󠁴 Tag Latin Small Letter T 🏴󠁭󠁫󠀶󠀴󠁿 Flag for Radoviš (MK-64) 🏴󠁮󠁺󠁷󠁧󠁮󠁿 Flag for Wellington (NZ-WGN) 👨🏽‍👨🏽‍👶🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁩󠁲󠀱󠀶󠁿 Flag for Kurdistan (IR-16) 👨🏽‍❤️‍💋‍👨🏿 Kiss - Man: Medium Skin Tone, Man: Dark Skin Tone 󠁳 Tag Latin Small Letter S 👩‍👶‍👶 Family: Woman, Baby, Baby 🏴󠁡󠁦󠁤󠁡󠁹󠁿 Flag for Daykundi (AF-DAY) 👨🏻‍❤️‍💋‍👨🏾 Kiss - Man: Light Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁡󠁦󠁦󠁲󠁡󠁿 Flag for Farah (AF-FRA) 󠁱 Tag Latin Small Letter Q 🏴󠁧󠁴󠁧󠁵󠁿 Flag for Guatemala (GT-GU) 🏴󠁣󠁨󠁴󠁧󠁿 Flag for Thurgau (CH-TG) 🏴󠁲󠁵󠁣󠁥󠁿 Flag for Chechen (RU-CE) 󠀵 Tag Digit Five 🏴󠁡󠁦󠁧󠁨󠁯󠁿 Flag for Ghōr (AF-GHO) 🏴󠁡󠁴󠀹󠁿 Flag for Vienna (AT-9) 🏴󠁡󠁦󠁧󠁨󠁡󠁿 Flag for Ghazni (AF-GHA) 󠁵 Tag Latin Small Letter U 🏴󠁢󠁷󠁧󠁡󠁿 Flag for Gaborone (BW-GA) 󠁹 Tag Latin 
Small Letter Y 󠁿 Cancel Tag 󠁷 Tag Latin Small Letter W 👩🏽‍❤️‍👩🏿 Couple With Heart - Woman: Medium Skin Tone, Woman: Dark Skin Tone 🏴󠁣󠁯󠁡󠁭󠁡󠁿 Flag for Amazonas (CO-AMA) 󠁮 Tag Latin Small Letter N 👩‍❤️‍💋‍👩🏽 Kiss - Woman, Woman: Medium Skin Tone 👨‍👶 Family: Man, Baby 🏴󠁡󠁴󠀱󠁿 Flag for Burgenland (AT-1) 🏴󠁡󠁦󠁨󠁥󠁬󠁿 Flag for Helmand (AF-HEL) 󠀶 Tag Digit Six 🏴󠁡󠁦󠁪󠁯󠁷󠁿 Flag for Jowzjan (AF-JOW) 🧕‍♀️ Woman With Headscarf 󠁢 Tag Latin Small Letter B 󠀰 Tag Digit Zero 🏴󠁡󠁦󠁨󠁥󠁲󠁿 Flag for Herat (AF-HER) 🏴󠁧󠁤󠀰󠀵󠁿 Flag for Saint Mark (GD-05) 3 Digit Three 󠁧 Tag Latin Small Letter G 🕴🏾‍♀️ Woman in Business Suit Levitating: Medium-Dark Skin Tone 👩🏽‍❤️‍💋‍👨🏽 Kiss - Woman: Medium Skin Tone, Man: Medium Skin Tone 🏴󠁵󠁳󠁡󠁫󠁿 Flag for Alaska (US-AK) 󠁲 Tag Latin Small Letter R 🏴󠁴󠁬󠁬󠁡󠁿 Flag for Lautém (TL-LA) 🏴󠁡󠁦󠁫󠁡󠁢󠁿 Flag for Kabul (AF-KAB) 👨‍❤️‍💋‍👨🏿 Kiss - Man, Man: Dark Skin Tone 🧕‍♂️ Man With Headscarf 󠁶 Tag Latin Small Letter V 󠁤 Tag Latin Small Letter D 🏴󠁡󠁦󠁫󠁡󠁮󠁿 Flag for Kandahar (AF-KAN) 🏴󠁡󠁦󠁫󠁡󠁰󠁿 Flag for Kapisa (AF-KAP) 🏴󠁭󠁣󠁳󠁲󠁿 Flag for Saint Roman (MC-SR) 🏴󠁥󠁥󠀳󠀹󠁿 Flag for Hiiu (EE-39) 󠁭 Tag Latin Small Letter M 🏴󠁡󠁦󠁫󠁨󠁯󠁿 Flag for Khost (AF-KHO) 🧕🏻‍♂️ Man With Headscarf: Light Skin Tone 🏴󠁡󠁦󠁫󠁤󠁺󠁿 Flag for Kunduz (AF-KDZ) 👩🏿‍❤️‍👨 Couple With Heart - Woman: Dark Skin Tone, Man 🏴󠁵󠁳󠁳󠁤󠁿 Flag for South Dakota (US-SD) 🏴󠁡󠁦󠁢󠁤󠁧󠁿 Flag for Badghis (AF-BDG) 🏴󠁩󠁳󠀸󠁿 Flag for Southern (IS-8) 🏴󠁡󠁦󠁫󠁮󠁲󠁿 Flag for Kunar (AF-KNR) 👨‍👨‍👶‍👶 Family: Man, Man, Baby, Baby 🏴󠁪󠁰󠀱󠀳󠁿 Flag for Tokyo (JP-13) 🏴󠁡󠁦󠁬󠁡󠁧󠁿 Flag for Laghman (AF-LAG) 🧕🏽‍♂️ Man With Headscarf: Medium Skin Tone 🏴󠁡󠁦󠁬󠁯󠁧󠁿 Flag for Logar (AF-LOG) 5 Digit Five 󠁣 Tag Latin Small Letter C 🏴󠁡󠁦󠁦󠁹󠁢󠁿 Flag for Faryab (AF-FYB) 󠁰 Tag Latin Small Letter P 🏴󠁡󠁦󠁮󠁡󠁮󠁿 Flag for Nangarhar (AF-NAN) 󠀹 Tag Digit Nine 🏴󠁥󠁳󠁮󠁣󠁿 Flag for Navarra Chartered Community (ES-NC) 👩🏼‍👦🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁭󠁸󠁮󠁡󠁹󠁿 Flag for Nayarit (MX-NAY) 🏴󠁢󠁲󠁰󠁥󠁿 Flag for Pernambuco (BR-PE) 🏴󠁩󠁴󠀷󠀲󠁿 Flag for Campania (IT-72) 🧕🏾‍♂️ Man With Headscarf: 
Medium-Dark Skin Tone 👩🏽‍❤️‍💋‍👩🏾 Kiss - Woman: Medium Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁡󠁦󠁮󠁵󠁲󠁿 Flag for Nuristan (AF-NUR) 👨‍👨‍👧‍👶 Family: Man, Man, Girl, Baby 🏴󠁰󠁧󠁷󠁢󠁫󠁿 Flag for West New Britain (PG-WBK) 👨🏼‍👩🏼‍👧🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁧󠁹󠁵󠁤󠁿 Flag for Upper Demerara-Berbice (GY-UD) 👨‍❤️‍💋‍👩 Kiss - Man, Woman 🏴󠁥󠁴󠁡󠁦󠁿 Flag for Afar (ET-AF) 🏴󠁡󠁦󠁰󠁡󠁲󠁿 Flag for Parwan (AF-PAR) 🏴󠁡󠁦󠁮󠁩󠁭󠁿 Flag for Nimruz (AF-NIM) 🏴󠁨󠁲󠀰󠀴󠁿 Flag for Karlovac (HR-04) 🏴󠁡󠁦󠁰󠁩󠁡󠁿 Flag for Paktia (AF-PIA) 🧕🏿‍♂️ Man With Headscarf: Dark Skin Tone 🧕🏼‍♂️ Man With Headscarf: Medium-Light Skin Tone 🏴󠁭󠁸󠁢󠁣󠁮󠁿 Flag for Baja California (MX-BCN) 🏴󠁡󠁦󠁰󠁫󠁡󠁿 Flag for Paktika (AF-PKA) 🏴󠁫󠁩󠁰󠁿 Flag for Phoenix Islands (KI-P) 󠁯 Tag Latin Small Letter O 🏴󠁡󠁦󠁰󠁡󠁮󠁿 Flag for Panjshir (AF-PAN) 🏴󠁣󠁨󠁴󠁩󠁿 Flag for Ticino (CH-TI) 🏴󠁳󠁩󠀱󠀹󠀲󠁿 Flag for Žirovnica (SI-192) 🏴󠁳󠁥󠁮󠁿 Flag for Halland (SE-N) 󠁪 Tag Latin Small Letter J 👩🏽‍❤️‍💋‍👩🏻 Kiss - Woman: Medium Skin Tone, Woman: Light Skin Tone 👨🏾‍👨🏾‍👶🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👶🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁳󠁳󠁢󠁮󠁿 Flag for Northern Bahr el Ghazal (SS-BN) 👨🏽‍❤️‍💋‍👩 Kiss - Man: Medium Skin Tone, Woman 🏴󠁣󠁦󠁢󠁫󠁿 Flag for Basse-Kotto (CF-BK) 👨‍❤️‍👨🏻 Couple With Heart - Man, Man: Light Skin Tone 👨🏽‍❤️‍👨 Couple With Heart - Man: Medium Skin Tone, Man 🏴󠁬󠁹󠁢󠁵󠁿 Flag for Butnan (LY-BU) 👩‍👶 Family: Woman, Baby 🏴󠁬󠁫󠀹󠁿 Flag for Sabaragamuwa (LK-9) 🏴󠁡󠁦󠁳󠁡󠁭󠁿 Flag for Samangan (AF-SAM) 🏴󠁴󠁶󠁮󠁫󠁬󠁿 Flag for Nukulaelae (TV-NKL) 🏴󠁡󠁥󠁲󠁫󠁿 Flag for Ras al-Khaimah (AE-RK) 🏴󠁥󠁳󠁣󠁥󠁿 Flag for Ceuta (ES-CE) 🏴󠁡󠁥󠁤󠁵󠁿 Flag for Dubai (AE-DU) 👨🏻‍👨🏻‍👶🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 🏴󠁪󠁰󠀴󠀷󠁿 Flag for Okinawa (JP-47) 🏴󠁡󠁦󠁳󠁡󠁲󠁿 Flag for Sar-e Pol (AF-SAR) 👩🏼‍👩🏼‍👦🏼‍👧🏼 Family - Woman: Medium-Light 
Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 󠁬 Tag Latin Small Letter L 🏴󠁡󠁦󠁵󠁲󠁵󠁿 Flag for Urozgan (AF-URU) 9 Digit Nine 👩🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨‍❤️‍💋‍👨🏽 Kiss - Man, Man: Medium Skin Tone 🏴󠁤󠁭󠀰󠀶󠁿 Flag for Saint Joseph (DM-06) 🏴󠁡󠁧󠀰󠀴󠁿 Flag for Saint John (AG-04) 🏴󠁣󠁯󠁶󠁩󠁤󠁿 Flag for Vichada (CO-VID) 🏴󠁰󠁷󠀲󠀱󠀸󠁿 Flag for Ngarchelong (PW-218) 🏴󠁲󠁵󠁡󠁲󠁫󠁿 Flag for Arkhangelsk (RU-ARK) 🏴󠁡󠁦󠁺󠁡󠁢󠁿 Flag for Zabul (AF-ZAB) 🏴󠁡󠁧󠀰󠀳󠁿 Flag for Saint George (AG-03) 🏴󠁩󠁴󠀲󠀵󠁿 Flag for Lombardy (IT-25) 👨🏻‍❤️‍💋‍👨🏻 Kiss - Man: Light Skin Tone, Man: Light Skin Tone 🏴󠁣󠁺󠀵󠀳󠁿 Flag for Pardubický kraj (CZ-53) 🏴󠁡󠁧󠀰󠀶󠁿 Flag for Saint Paul (AG-06) 🏴󠁶󠁮󠀵󠀱󠁿 Flag for Trà Vinh (VN-51) 👩‍👨‍👶‍👧 Family: Woman, Man, Baby, Girl 🏴󠁫󠁲󠀴󠀸󠁿 Flag for South Gyeongsang (KR-48) 🏴󠁡󠁧󠀰󠀵󠁿 Flag for Saint Mary (AG-05) 🏴󠁧󠁲󠁫󠁿 Flag for North Aegean (GR-K) 👩‍👩‍👶‍👧 Family: Woman, Woman, Baby, Girl 🏴󠁥󠁣󠁺󠁿 Flag for Zamora-Chinchipe (EC-Z) 🏴󠁮󠁩󠁭󠁳󠁿 Flag for Masaya (NI-MS) 🏴󠁫󠁩󠁧󠁿 Flag for Gilbert Islands (KI-G) 🏴󠁭󠁸󠁣󠁨󠁨󠁿 Flag for Chihuahua (MX-CHH) 👨🏼‍👨🏼‍👶🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👨🏽‍👶🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏽‍👧🏽‍👧🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩‍👨‍👶‍👶 Family: Woman, Man, Baby, Baby 🏴󠁡󠁧󠀱󠀱󠁿 Flag for Redonda (AG-11) 👩‍👩‍👶 Family: Woman, Woman, Baby 👨‍❤️‍💋‍👩🏻 Kiss - Man, Woman: Light Skin Tone 👨‍❤️‍💋‍👨🏾 Kiss - Man, Man: Medium-Dark Skin Tone 🏴󠁡󠁬󠀰󠀱󠁿 Flag for Berat County (AL-01) 󠁡 Tag Latin Small Letter A 🏴󠁡󠁧󠀱󠀰󠁿 Flag for Barbuda (AG-10) 🏴󠁣󠁯󠁳󠁡󠁰󠁿 Flag for San Andrés & Providencia (CO-SAP) 🏴󠁡󠁬󠀰󠀳󠁿 Flag for Elbasan County (AL-03) 👨🏾‍👨🏾‍👶🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👦🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin 
Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁩󠁮󠁫󠁡󠁿 Flag for Karnataka (IN-KA) 🏴󠁡󠁬󠀰󠀵󠁿 Flag for Gjirokastër County (AL-05) 🏴󠁪󠁰󠀰󠀱󠁿 Flag for Hokkaidō (JP-01) 👩🏾‍👨🏾‍👶🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁵󠁧󠁣󠁿 Flag for Central (UG-C) 👨🏼‍❤️‍💋‍👨 Kiss - Man: Medium-Light Skin Tone, Man 🏴󠁡󠁬󠀰󠀲󠁿 Flag for Durrës County (AL-02) 🏴󠁡󠁬󠀰󠀴󠁿 Flag for Fier County (AL-04) 🏴󠁡󠁬󠀰󠀶󠁿 Flag for Korçë County (AL-06) 🏴󠁰󠁹󠀱󠀶󠁿 Flag for Alto Paraguay (PY-16) 🏴󠁡󠁬󠀰󠀷󠁿 Flag for Kukës County (AL-07) 👨🏿‍❤️‍💋‍👨 Kiss - Man: Dark Skin Tone, Man 🏴󠁧󠁹󠁵󠁴󠁿 Flag for Upper Takutu-Upper Essequibo (GY-UT) 👨🏾‍👶🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👶🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 👨🏻‍👨🏻‍👶🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 🏴󠁡󠁬󠀰󠀹󠁿 Flag for Dibër County (AL-09) 🏴󠁡󠁬󠀰󠀸󠁿 Flag for Lezhë County (AL-08) 👨🏼‍👨🏼‍👶🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁡󠁬󠀱󠀱󠁿 Flag for Tirana County (AL-11) 🏴󠁡󠁤󠀰󠀶󠁿 Flag for Sant Julià de Lòria (AD-06) 🏴󠁢󠁲󠁢󠁡󠁿 Flag for Bahia (BR-BA) 🏴󠁡󠁬󠀱󠀰󠁿 Flag for Shkodër County (AL-10) 👩‍❤️‍💋‍👨🏿 Kiss - Woman, Man: Dark Skin Tone 👨🏽‍👨🏽‍👶🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👨🏾‍👶🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩‍❤️‍💋‍👨🏽 Kiss - Woman, Man: Medium Skin Tone 🏴󠁡󠁬󠀱󠀲󠁿 Flag for Vlorë County (AL-12) 🏴󠁴󠁨󠀲󠀳󠁿 Flag for Trat (TH-23) 🏴󠁡󠁭󠁧󠁲󠁿 Flag for Gegharkunik (AM-GR) 👨🏿‍👨🏿‍👶🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁡󠁭󠁡󠁧󠁿 Flag for Aragatsotn (AM-AG) 🏴󠁡󠁭󠁡󠁲󠁿 Flag for Ararat (AM-AR) 🏴󠁡󠁭󠁥󠁲󠁿 Flag for Yerevan (AM-ER) 🏴󠁡󠁭󠁫󠁴󠁿 Flag for 
Kotayk (AM-KT) 🏴󠁦󠁲󠁣󠁯󠁲󠁿 Flag for Corse (FR-COR) 🏴󠁡󠁭󠁡󠁶󠁿 Flag for Armavir (AM-AV) 👩‍❤️‍💋‍👩🏿 Kiss - Woman, Woman: Dark Skin Tone 🏴󠁢󠁲󠁭󠁧󠁿 Flag for Minas Gerais (BR-MG) 🏴󠁣󠁧󠀱󠀶󠁿 Flag for Pointe-Noire (CG-16) 🏴󠁡󠁭󠁬󠁯󠁿 Flag for Lori (AM-LO) 🏴󠁤󠁺󠀲󠀱󠁿 Flag for Skikda (DZ-21) 🏴󠁡󠁭󠁳󠁨󠁿 Flag for Shirak (AM-SH) 👩‍❤️‍💋‍👩🏾 Kiss - Woman, Woman: Medium-Dark Skin Tone 🏴󠁡󠁤󠀰󠀷󠁿 Flag for Andorra la Vella (AD-07) 🏴󠁲󠁵󠁡󠁬󠁴󠁿 Flag for Altai Krai (RU-ALT) 🏴󠁳󠁩󠀱󠀶󠀷󠁿 Flag for Lovrenc na Pohorju (SI-167) 👩‍❤️‍💋‍👩🏼 Kiss - Woman, Woman: Medium-Light Skin Tone 👨🏿‍❤️‍💋‍👩🏻 Kiss - Man: Dark Skin Tone, Woman: Light Skin Tone 🏴󠁬󠁴󠁰󠁮󠁿 Flag for Panevėžys County (LT-PN) 🏴󠁤󠁯󠀳󠀵󠁿 Flag for Cibao Norte (DO-35) 🏴󠁮󠁯󠀱󠀰󠁿 Flag for Vest-Agder (NO-10) 👨‍❤️‍💋‍👩🏿 Kiss - Man, Woman: Dark Skin Tone 🏴󠁡󠁭󠁶󠁤󠁿 Flag for Vayots Dzor (AM-VD) 👩🏻‍❤️‍💋‍👩🏻 Kiss - Woman: Light Skin Tone, Woman: Light Skin Tone 🏴󠁵󠁳󠁶󠁴󠁿 Flag for Vermont (US-VT) 👨🏽‍❤️‍💋‍👨 Kiss - Man: Medium Skin Tone, Man 🏴󠁡󠁯󠁢󠁧󠁯󠁿 Flag for Bengo (AO-BGO) 👩🏻‍❤️‍💋‍👩 Kiss - Woman: Light Skin Tone, Woman 🏴󠁣󠁯󠁭󠁥󠁴󠁿 Flag for Meta (CO-MET) 🏴󠁮󠁬󠁢󠁱󠀲󠁿 Flag for Saba (NL-BQ2) 👩🏽‍❤️‍💋‍👩🏼 Kiss - Woman: Medium Skin Tone, Woman: Medium-Light Skin Tone 👨🏽‍👩🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁡󠁯󠁢󠁧󠁵󠁿 Flag for Benguela (AO-BGU) 🏴󠁣󠁯󠁳󠁵󠁣󠁿 Flag for Sucre (CO-SUC) 🏴󠁡󠁯󠁣󠁣󠁵󠁿 Flag for Cuando Cubango (AO-CCU) 🏴󠁰󠁥󠁭󠁤󠁤󠁿 Flag for Madre de Dios (PE-MDD) 🏴󠁣󠁨󠁶󠁤󠁿 Flag for Vaud (CH-VD) 🏴󠁡󠁯󠁢󠁩󠁥󠁿 Flag for Bié (AO-BIE) 🏴󠁡󠁯󠁣󠁡󠁢󠁿 Flag for Cabinda (AO-CAB) 🏴󠁡󠁯󠁨󠁵󠁩󠁿 Flag for Huíla (AO-HUI) 🏴󠁡󠁯󠁣󠁵󠁳󠁿 Flag for Cuanza Sul (AO-CUS) 👨‍❤️‍💋‍👩🏽 Kiss - Man, Woman: Medium Skin Tone 👩‍👩‍👦‍👶 Family: Woman, Woman, Boy, Baby 🏴󠁡󠁯󠁨󠁵󠁡󠁿 Flag for Huambo (AO-HUA) 👨🏼‍❤️‍👩🏾 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁣󠁹󠀰󠀶󠁿 Flag for Kyrenia (CY-06) 👩🏼‍❤️‍💋‍👨🏻 Kiss - Woman: Medium-Light Skin Tone, Man: Light Skin Tone 🏴󠁡󠁥󠁵󠁱󠁿 Flag for Umm al-Quwain (AE-UQ) 🏴󠁡󠁯󠁬󠁳󠁵󠁿 Flag for Lunda Sul (AO-LSU) 🏴󠁬󠁲󠁣󠁭󠁿 Flag for Grand Cape Mount (LR-CM) 🏴󠁡󠁯󠁬󠁮󠁯󠁿 Flag for Lunda Norte (AO-LNO) 👩🏽‍❤️‍👨🏿 
Couple With Heart - Woman: Medium Skin Tone, Man: Dark Skin Tone 👨🏾‍❤️‍👩🏾 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁡󠁯󠁣󠁮󠁯󠁿 Flag for Cuanza Norte (AO-CNO) 🏴󠁡󠁯󠁭󠁡󠁬󠁿 Flag for Malanje (AO-MAL) 👩🏼‍❤️‍💋‍👩 Kiss - Woman: Medium-Light Skin Tone, Woman 👨🏼‍👩🏼‍👦🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁡󠁯󠁭󠁯󠁸󠁿 Flag for Moxico (AO-MOX) 🏴󠁡󠁯󠁮󠁡󠁭󠁿 Flag for Namibe (AO-NAM) 👨🏾‍👩🏾‍👦🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 󠁫 Tag Latin Small Letter K 🕴🏼‍♀️ Woman in Business Suit Levitating: Medium-Light Skin Tone 🏴󠁡󠁲󠁡󠁿 Flag for Salta (AR-A) 👨🏾‍👩🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁣󠁤󠁬󠁵󠁿 Flag for Lualaba (CD-LU) 🏴󠁡󠁲󠁢󠁿 Flag for Buenos Aires Province (AR-B) 👨🏿‍👩🏿‍👦🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁡󠁲󠁤󠁿 Flag for San Luis (AR-D) 🏴󠁡󠁯󠁺󠁡󠁩󠁿 Flag for Zaire (AO-ZAI) 🏴󠁴󠁲󠀰󠀳󠁿 Flag for Afyonkarahisar (TR-03) 0 Digit Zero 🏴󠁶󠁮󠀲󠀵󠁿 Flag for Quảng Trị (VN-25) 🕴🏿‍♀️ Woman in Business Suit Levitating: Dark Skin Tone 🏴󠁡󠁯󠁵󠁩󠁧󠁿 Flag for Uíge (AO-UIG) 👩🏾‍👧🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁵󠁡󠀱󠀸󠁿 Flag for Zhytomyrshchyna (UA-18) 👨🏾‍❤️‍💋‍👨🏽 Kiss - Man: Medium-Dark Skin Tone, Man: Medium Skin Tone 🏴󠁣󠁯󠁣󠁥󠁳󠁿 Flag for Cesar (CO-CES) 🏴󠁡󠁭󠁳󠁵󠁿 Flag for Syunik (AM-SU) 🏴󠁡󠁲󠁥󠁿 Flag for Entre Ríos (AR-E) 👨🏿‍❤️‍💋‍👩 Kiss - Man: Dark Skin Tone, Woman 🏴󠁡󠁲󠁦󠁿 Flag for La Rioja (AR-F) 🏴󠁫󠁺󠁶󠁯󠁳󠁿 Flag for East Kazakhstan (KZ-VOS) 🏴󠁡󠁦󠁷󠁡󠁲󠁿 Flag for Maidan Wardak (AF-WAR) 🏴󠁡󠁲󠁪󠁿 Flag for San Juan (AR-J) 👩🏾‍👩🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁡󠁯󠁬󠁵󠁡󠁿 Flag for Luanda (AO-LUA) 🏴󠁡󠁲󠁬󠁿 Flag for La Pampa (AR-L) 👩🏼‍❤️‍💋‍👩🏽 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium Skin Tone 
👨🏼‍👩🏼‍👦🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏼‍👩🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁡󠁲󠁫󠁿 Flag for Catamarca (AR-K) 🏴󠁡󠁲󠁲󠁿 Flag for Río Negro (AR-R) 🏴󠁡󠁲󠁨󠁿 Flag for Chaco (AR-H) 🏴󠁡󠁲󠁰󠁿 Flag for Formosa (AR-P) 🏴󠁡󠁲󠁭󠁿 Flag for Mendoza (AR-M) 🏴󠁡󠁲󠁮󠁿 Flag for Misiones (AR-N) 🏴󠁡󠁲󠁱󠁿 Flag for Neuquén (AR-Q) 👨🏽‍👩🏽‍👦🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁡󠁲󠁴󠁿 Flag for Tucumán (AR-T) 🏴󠁡󠁲󠁳󠁿 Flag for Santa Fe (AR-S) 🏴󠁡󠁲󠁷󠁿 Flag for Corrientes (AR-W) 🏴󠁡󠁲󠁹󠁿 Flag for Jujuy (AR-Y) 🏴󠁡󠁲󠁶󠁿 Flag for Tierra del Fuego (AR-V) 🏴󠁡󠁲󠁵󠁿 Flag for Chubut (AR-U) 🏴󠁡󠁲󠁸󠁿 Flag for Córdoba (AR-X) 🏴󠁡󠁲󠁺󠁿 Flag for Santa Cruz (AR-Z) 🏴󠁡󠁲󠁧󠁿 Flag for Santiago del Estero (AR-G) 🏴󠁡󠁴󠀲󠁿 Flag for Carinthia (AT-2) 🏴󠁣󠁨󠁢󠁬󠁿 Flag for Basel-Landschaft (CH-BL) 👩🏿‍👧🏿‍👧🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👨🏻‍👩🏻‍👦🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👩🏻‍👧🏻‍👶🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👨‍👨‍👦‍👧 Family: Man, Man, Boy, Girl 🏴󠁡󠁴󠀳󠁿 Flag for Lower Austria (AT-3) 👩‍👶‍👦 Family: Woman, Baby, Boy 🏴󠁭󠁲󠀱󠀳󠁿 Flag for Nouakchott Ouest (MR-13) 👨🏼‍👩🏼‍👦🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁣󠁦󠁭󠁢󠁿 Flag for Mbomou (CF-MB) 🏴󠁡󠁴󠀶󠁿 Flag for Styria (AT-6) 🏴󠁰󠁨󠀰󠀱󠁿 Flag for Ilocos (PH-01) 🏴󠁡󠁴󠀷󠁿 Flag for Tyrol (AT-7) 🏴󠁣󠁮󠀵󠀲󠁿 Flag for Guizhou (CN-52) 🏴󠁬󠁡󠁸󠁳󠁿 Flag for Xaisomboun (LA-XS) 🏴󠁡󠁴󠀸󠁿 Flag for Vorarlberg (AT-8) 👨🏼‍👨🏼‍👦🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁡󠁴󠀵󠁿 Flag for Salzburg (AT-5) 👨🏿‍👩🏿‍👦🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 
👩‍👩‍👶‍👶 Family: Woman, Woman, Baby, Baby 👩‍👨‍👧‍👦 Family: Woman, Man, Girl, Boy 👩‍👨‍👧 Family: Woman, Man, Girl 👩‍👦‍👶 Family: Woman, Boy, Baby 🏴󠁡󠁵󠁮󠁳󠁷󠁿 Flag for New South Wales (AU-NSW) 👩‍👨‍👧‍👶 Family: Woman, Man, Girl, Baby 👩🏽‍👧🏽‍👶🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁡󠁵󠁮󠁴󠁿 Flag for Northern Territory (AU-NT) 👩🏿‍👧🏿‍👦🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁡󠁵󠁱󠁬󠁤󠁿 Flag for Queensland (AU-QLD) 2 Digit Two 👩‍👨‍👧‍👧 Family: Woman, Man, Girl, Girl 👩🏼‍👧🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁡󠁴󠀴󠁿 Flag for Upper Austria (AT-4) 🏴󠁧󠁲󠁡󠁿 Flag for East Macedonia and Thrace (GR-A) 👨🏽‍👩🏽‍👦🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👩🏾‍👦🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨‍👶‍👧 Family: Man, Baby, Girl 👨🏻‍👩🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone 👨🏿‍👩🏿‍👦🏿‍👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 👩‍👨‍👶 Family: Woman, Man, Baby 🏴󠁵󠁳󠁮󠁥󠁿 Flag for Nebraska (US-NE) 🏴󠁡󠁺󠁡󠁧󠁡󠁿 Flag for Agstafa (AZ-AGA) 🏴󠁡󠁦󠁴󠁡󠁫󠁿 Flag for Takhar (AF-TAK) 🏴󠁡󠁵󠁷󠁡󠁿 Flag for Western Australia (AU-WA) 🏴󠁡󠁺󠁡󠁧󠁣󠁿 Flag for Aghjabadi (AZ-AGC) 🏴󠁡󠁺󠁡󠁳󠁴󠁿 Flag for Astara (AZ-AST) 🏴󠁡󠁺󠁢󠁡󠁬󠁿 Flag for Balakan (AZ-BAL) 👩‍❤️‍💋‍👨🏼 Kiss - Woman, Man: Medium-Light Skin Tone 🏴󠁵󠁳󠁣󠁡󠁿 Flag for California (US-CA) 🏴󠁡󠁺󠁡󠁧󠁳󠁿 Flag for Agdash (AZ-AGS) 🏴󠁡󠁺󠁢󠁡󠁿 Flag for Baku (AZ-BA) 👨🏻‍❤️‍💋‍👩🏿 Kiss - Man: Light Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁵󠁶󠁩󠁣󠁿 Flag for Victoria (AU-VIC) 🏴󠁡󠁺󠁡󠁧󠁭󠁿 Flag for Agdam (AZ-AGM) 👨🏻‍👧🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone 🏴󠁡󠁺󠁢󠁡󠁲󠁿 Flag for Barda (AZ-BAR) 👨🏽‍👩🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👧🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin 
Tone, Baby: Medium-Dark Skin Tone 🏴󠁡󠁺󠁡󠁧󠁵󠁿 Flag for Agsu (AZ-AGU) 🏴󠁣󠁤󠁴󠁡󠁿 Flag for Tanganyika (CD-TA) 👩🏻‍❤️‍👨🏼 Couple With Heart - Woman: Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁡󠁺󠁢󠁩󠁬󠁿 Flag for Bilasuvar (AZ-BIL) 🏴󠁡󠁺󠁣󠁡󠁬󠁿 Flag for Jalilabad (AZ-CAL) 🏴󠁡󠁺󠁣󠁡󠁢󠁿 Flag for Jabrayil (AZ-CAB) 🏴󠁡󠁺󠁢󠁥󠁹󠁿 Flag for Beylagan (AZ-BEY) 🏴󠁳󠁩󠀰󠀸󠀵󠁿 Flag for Novo Mesto (SI-085) 🏴󠁣󠁧󠀹󠁿 Flag for Niari (CG-9) 🏴󠁡󠁺󠁤󠁡󠁳󠁿 Flag for Dashkasan (AZ-DAS) 🏴󠁡󠁺󠁦󠁵󠁺󠁿 Flag for Fizuli (AZ-FUZ) 👩🏿‍❤️‍💋‍👨🏽 Kiss - Woman: Dark Skin Tone, Man: Medium Skin Tone 👨🏿‍❤️‍👨🏾 Couple With Heart - Man: Dark Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁡󠁺󠁧󠁯󠁹󠁿 Flag for Goychay (AZ-GOY) 🏴󠁡󠁺󠁧󠁯󠁲󠁿 Flag for Goranboy (AZ-GOR) 🏴󠁡󠁺󠁧󠁡󠁿 Flag for Ganja (AZ-GA) 🏴󠁱󠁡󠁵󠁳󠁿 Flag for Umm Salal (QA-US) 🏴󠁦󠁪󠁥󠁿 Flag for Eastern (FJ-E) 🏴󠁡󠁺󠁧󠁹󠁧󠁿 Flag for Goygol (AZ-GYG) 🏴󠁡󠁺󠁨󠁡󠁣󠁿 Flag for Hajigabul (AZ-HAC) 👩🏿‍❤️‍💋‍👩 Kiss - Woman: Dark Skin Tone, Woman 🏴󠁬󠁶󠀰󠀷󠀷󠁿 Flag for Rēzekne Municipality (LV-077) 🏴󠁡󠁵󠁡󠁣󠁴󠁿 Flag for Australian Capital Territory (AU-ACT) 👨🏽‍❤️‍💋‍👩🏾 Kiss - Man: Medium Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁮󠁧󠁦󠁣󠁿 Flag for Federal Capital Territory (NG-FC) 🏴󠁲󠁵󠁢󠁲󠁹󠁿 Flag for Bryansk (RU-BRY) 🏴󠁡󠁭󠁴󠁶󠁿 Flag for Tavush (AM-TV) 🏴󠁥󠁣󠁳󠁤󠁿 Flag for Santo Domingo de los Tsáchilas (EC-SD) 👩🏼‍❤️‍👩 Couple With Heart - Woman: Medium-Light Skin Tone, Woman 🏴󠁡󠁺󠁩󠁭󠁩󠁿 Flag for Imishli (AZ-IMI) 🏴󠁴󠁭󠁳󠁿 Flag for Aşgabat (TM-S) 👨‍❤️‍👩🏾 Couple With Heart - Man, Woman: Medium-Dark Skin Tone 🏴󠁬󠁡󠁸󠁥󠁿 Flag for Sekong (LA-XE) 🏴󠁲󠁯󠁧󠁪󠁿 Flag for Gorj (RO-GJ) 👨🏻‍❤️‍👨 Couple With Heart - Man: Light Skin Tone, Man 🏴󠁡󠁺󠁫󠁵󠁲󠁿 Flag for Kurdamir (AZ-KUR) 👩🏻‍👨🏻‍👦🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁡󠁺󠁫󠁡󠁬󠁿 Flag for Kalbajar (AZ-KAL) 🏴󠁡󠁺󠁧󠁡󠁤󠁿 Flag for Gadabay (AZ-GAD) 🏴󠁡󠁺󠁬󠁡󠁣󠁿 Flag for Lachin (AZ-LAC) 🏴󠁡󠁺󠁬󠁡󠁿 Flag for Lankaran (AZ-LA) 🏴󠁶󠁮󠁳󠁧󠁿 Flag for Ho Chi Minh City (VN-SG) 🏴󠁡󠁺󠁬󠁥󠁲󠁿 Flag for Lerik (AZ-LER) 🏴󠁡󠁺󠁭󠁩󠁿 Flag for Mingachevir (AZ-MI) 👩🏾‍👨🏾‍👧🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: 
Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁡󠁺󠁮󠁡󠁿 Flag for Naftalan (AZ-NA) 🏴󠁡󠁺󠁭󠁡󠁳󠁿 Flag for Masally (AZ-MAS) 👨‍❤️‍👩 Couple With Heart - Man, Woman 🏴󠁡󠁺󠁬󠁡󠁮󠁿 Flag for Lankaran District (AZ-LAN) 👩🏼‍👨🏼‍👧🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍❤️‍💋‍👨🏾 Kiss - Woman: Medium Skin Tone, Man: Medium-Dark Skin Tone 👩🏿‍👧🏿‍👶🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁡󠁺󠁮󠁥󠁦󠁿 Flag for Neftchala (AZ-NEF) 🏴󠁡󠁺󠁮󠁸󠁿 Flag for Nakhchivan AR (AZ-NX) 🏴󠁳󠁩󠀰󠀱󠀱󠁿 Flag for Celje (SI-011) 🏴󠁬󠁴󠀳󠀲󠁿 Flag for Panevėžio Municipality (LT-32) 👩🏿‍❤️‍💋‍👩🏽 Kiss - Woman: Dark Skin Tone, Woman: Medium Skin Tone 👨🏻‍❤️‍👩🏿 Couple With Heart - Man: Light Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁺󠁩󠁳󠁭󠁿 Flag for Ismailli (AZ-ISM) 󠁨 Tag Latin Small Letter H 👩🏾‍❤️‍👨🏻 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Light Skin Tone 👩🏻‍👶🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone 🏴󠁣󠁦󠁮󠁭󠁿 Flag for Nana-Mambéré (CF-NM) 🏴󠁡󠁺󠁱󠁯󠁢󠁿 Flag for Gobustan (AZ-QOB) 👩🏿‍❤️‍💋‍👨🏻 Kiss - Woman: Dark Skin Tone, Man: Light Skin Tone 👩🏿‍❤️‍💋‍👩🏿 Kiss - Woman: Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁺󠁱󠁢󠁩󠁿 Flag for Qubadli (AZ-QBI) 🏴󠁡󠁺󠁱󠁡󠁺󠁿 Flag for Qazakh (AZ-QAZ) 🏴󠁲󠁯󠁢󠁶󠁿 Flag for Braşov (RO-BV) 👨‍👩‍👧‍👶 Family: Man, Woman, Girl, Baby 🏴󠁡󠁺󠁱󠁢󠁡󠁿 Flag for Quba (AZ-QBA) 🏴󠁡󠁺󠁱󠁡󠁢󠁿 Flag for Qabala (AZ-QAB) 🏴󠁣󠁨󠁵󠁲󠁿 Flag for Uri (CH-UR) 🏴󠁡󠁺󠁯󠁧󠁵󠁿 Flag for Oghuz (AZ-OGU) 🏴󠁡󠁺󠁱󠁡󠁸󠁿 Flag for Qakh (AZ-QAX) 🏴󠁳󠁩󠀲󠀰󠀶󠁿 Flag for Šmarješke Toplice (SI-206) 👨🏾‍❤️‍💋‍👩🏿 Kiss - Man: Medium-Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁧󠀰󠀷󠁿 Flag for Saint Peter (AG-07) 👨🏻‍👩🏻‍👧🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 🏴󠁬󠁲󠁭󠁹󠁿 Flag for Maryland (LR-MY) 🏴󠁡󠁵󠁳󠁡󠁿 Flag for South Australia (AU-SA) 🏴󠁡󠁺󠁱󠁵󠁳󠁿 Flag for Qusar (AZ-QUS) 🏴󠁡󠁺󠁳󠁡󠁢󠁿 Flag for Sabirabad (AZ-SAB) 👨‍❤️‍👩🏽 Couple With Heart - Man, Woman: Medium Skin Tone 👨‍❤️‍👩🏼 Couple With Heart - Man, Woman: Medium-Light Skin Tone 
🏴󠁡󠁺󠁳󠁡󠁴󠁿 Flag for Saatly (AZ-SAT) 🏴󠁡󠁺󠁳󠁢󠁮󠁿 Flag for Shabran (AZ-SBN) 👨🏼‍❤️‍👩🏽 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium Skin Tone 🏴󠁡󠁺󠁳󠁡󠁫󠁿 Flag for Shaki District (AZ-SAK) 🏴󠁣󠁯󠁣󠁡󠁳󠁿 Flag for Casanare (CO-CAS) 👨‍👩‍👶‍👶 Family: Man, Woman, Baby, Baby 🏴󠁡󠁺󠁳󠁲󠁿 Flag for Shirvan (AZ-SR) 🏴󠁡󠁺󠁳󠁵󠁳󠁿 Flag for Shusha (AZ-SUS) 🏴󠁣󠁨󠁶󠁳󠁿 Flag for Valais (CH-VS) 👩🏽‍👶🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone 👩🏻‍❤️‍💋‍👨🏿 Kiss - Woman: Light Skin Tone, Man: Dark Skin Tone 🏴󠁡󠁺󠁳󠁡󠁿 Flag for Shaki (AZ-SA) 🏴󠁦󠁲󠁭󠁱󠁿 Flag for Martinique (FR-MQ) 🏴󠁡󠁺󠁳󠁭󠁿 Flag for Sumqayit (AZ-SM) 🏴󠁡󠁺󠁳󠁩󠁹󠁿 Flag for Siazan (AZ-SIY) 🏴󠁡󠁺󠁳󠁭󠁩󠁿 Flag for Shamakhi (AZ-SMI) 👩🏿‍❤️‍💋‍👨 Kiss - Woman: Dark Skin Tone, Man 🏴󠁡󠁺󠁳󠁭󠁸󠁿 Flag for Samukh (AZ-SMX) 👨🏻‍👩🏻‍👧🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 🏴󠁡󠁺󠁴󠁯󠁶󠁿 Flag for Tovuz (AZ-TOV) 🏴󠁡󠁺󠁸󠁡󠁣󠁿 Flag for Khachmaz (AZ-XAC) 🏴󠁡󠁺󠁵󠁣󠁡󠁿 Flag for Ujar (AZ-UCA) 🏴󠁡󠁺󠁴󠁡󠁲󠁿 Flag for Tartar (AZ-TAR) 👨🏿‍❤️‍💋‍👨🏻 Kiss - Man: Dark Skin Tone, Man: Light Skin Tone 👩🏼‍👧🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👩🏽‍👧🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁡󠁺󠁸󠁩󠁺󠁿 Flag for Khizi (AZ-XIZ) 👨🏽‍❤️‍👨🏼 Couple With Heart - Man: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁡󠁺󠁸󠁣󠁩󠁿 Flag for Khojali (AZ-XCI) 🏴󠁶󠁥󠁹󠁿 Flag for Delta Amacuro (VE-Y) 🏴󠁡󠁺󠁸󠁡󠁿 Flag for Stepanakert (AZ-XA) 🏴󠁡󠁺󠁹󠁡󠁲󠁿 Flag for Yardymli (AZ-YAR) 🏴󠁡󠁺󠁹󠁥󠁶󠁿 Flag for Yevlakh District (AZ-YEV) 🏴󠁡󠁺󠁺󠁡󠁱󠁿 Flag for Zaqatala (AZ-ZAQ) 👩🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁡󠁺󠁹󠁥󠁿 Flag for Yevlakh (AZ-YE) 🏴󠁢󠁡󠁢󠁩󠁨󠁿 Flag for Federation of Bosnia and Herzegovina (BA-BIH) 🏴󠁡󠁺󠁺󠁡󠁲󠁿 Flag for Zardab (AZ-ZAR) 🏴󠁡󠁺󠁳󠁡󠁬󠁿 Flag for Salyan (AZ-SAL) 🏴󠁣󠁨󠁺󠁧󠁿 Flag for Zug (CH-ZG) 👨🏾‍👩🏾‍👧🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨🏿‍👩🏿‍👧🏿‍👶🏿 
Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👩🏿‍👶🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁢󠁡󠁳󠁲󠁰󠁿 Flag for Republika Srpska (BA-SRP) 👨🏽‍❤️‍👩 Couple With Heart - Man: Medium Skin Tone, Woman 👨🏻‍👩🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone 🏴󠁥󠁳󠁡󠁮󠁿 Flag for Andalusia (ES-AN) 👨🏼‍👩🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁢󠁢󠀰󠀴󠁿 Flag for Saint James (BB-04) 👨🏾‍❤️‍👩🏼 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁢󠁢󠀰󠀳󠁿 Flag for Saint George (BB-03) 🏴󠁢󠁢󠀰󠀲󠁿 Flag for Saint Andrew (BB-02) 👨‍👩‍👶‍👦 Family: Man, Woman, Baby, Boy 👨🏽‍👩🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁢󠁢󠀰󠀵󠁿 Flag for Saint John (BB-05) 👨🏾‍👩🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁢󠁢󠀰󠀶󠁿 Flag for Saint Joseph (BB-06) 🏴󠁬󠁫󠀱󠁿 Flag for Western (LK-1) 🏴󠁢󠁹󠁢󠁲󠁿 Flag for Brest (BY-BR) 🏴󠁡󠁺󠁳󠁫󠁲󠁿 Flag for Shamkir (AZ-SKR) 🏴󠁢󠁢󠀰󠀷󠁿 Flag for Saint Lucy (BB-07) 👩🏻‍👶🏻‍👦🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 🏴󠁥󠁳󠁣󠁭󠁿 Flag for Castile-La Mancha (ES-CM) 🏴󠁢󠁢󠀱󠀰󠁿 Flag for Saint Philip (BB-10) 🏴󠁶󠁣󠀰󠀴󠁿 Flag for Saint George (VC-04) 👨🏻‍👩🏻‍👶🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 👩🏻‍👧🏻‍👧🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁤󠁡󠁿 Flag for Barisal (BD-A) 🏴󠁡󠁺󠁺󠁡󠁮󠁿 Flag for Zangilan (AZ-ZAN) 🏴󠁪󠁭󠀰󠀱󠁿 Flag for Kingston (JM-01) 👨🏼‍👩🏼‍👶🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁢󠁤󠁥󠁿 Flag for Rajshahi Division (BD-E) 🏴󠁢󠁤󠁦󠁿 Flag for Rangpur Division (BD-F) 🏴󠁢󠁤󠁣󠁿 Flag for Dhaka Division (BD-C) 🏴󠁢󠁤󠁤󠁿 Flag for Khulna Division (BD-D) 🏴󠁢󠁢󠀰󠀹󠁿 Flag for Saint Peter (BB-09) 🏴󠁳󠁩󠀰󠀵󠀸󠁿 Flag for Lenart (SI-058) 👩🏼‍👶🏼 Family - Woman: 
Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁢󠁦󠀰󠀲󠁿 Flag for Cascades (BF-02) 🏴󠁢󠁤󠁨󠁿 Flag for Mymensingh Division (BD-H) 🏴󠁢󠁥󠁷󠁡󠁬󠁿 Flag for Wallonia (BE-WAL) 🏴󠁭󠁵󠁢󠁲󠁿 Flag for Beau-Bassin Rose-Hill (MU-BR) 🏴󠁢󠁦󠀰󠀴󠁿 Flag for Centre-Est (BF-04) 🏴󠁣󠁮󠀹󠀱󠁿 Flag for Hong Kong SAR China (CN-91) 🏴󠁢󠁦󠀰󠀱󠁿 Flag for Boucle du Mouhoun (BF-01) 🏴󠁢󠁦󠀰󠀳󠁿 Flag for Centre (BF-03) 🏴󠁤󠁫󠀸󠀲󠁿 Flag for Central Denmark (DK-82) 🏴󠁢󠁦󠀰󠀷󠁿 Flag for Centre-Sud (BF-07) 👨🏽‍👩🏽‍👶🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁢󠁦󠀰󠀶󠁿 Flag for Centre-Ouest (BF-06) 🏴󠁢󠁦󠀰󠀵󠁿 Flag for Centre-Nord (BF-05) 🏴󠁢󠁢󠀰󠀸󠁿 Flag for Saint Michael (BB-08) 🏴󠁢󠁢󠀱󠀱󠁿 Flag for Saint Thomas (BB-11) 👨🏽‍❤️‍👩🏿 Couple With Heart - Man: Medium Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁦󠀰󠀸󠁿 Flag for Est (BF-08) 🏴󠁢󠁥󠁢󠁲󠁵󠁿 Flag for Brussels (BE-BRU) 🏴󠁢󠁤󠁧󠁿 Flag for Sylhet Division (BD-G) 🏴󠁢󠁦󠀱󠀱󠁿 Flag for Plateau-Central (BF-11) 🏴󠁢󠁤󠁢󠁿 Flag for Chittagong Division (BD-B) 🏴󠁢󠁦󠀱󠀳󠁿 Flag for Sud-Ouest (BF-13) 👨🏾‍👩🏾‍👶🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁢󠁧󠀰󠀵󠁿 Flag for Vidin (BG-05) 🏴󠁢󠁧󠀰󠀳󠁿 Flag for Varna (BG-03) 👨🏿‍❤️‍👩🏽 Couple With Heart - Man: Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁧󠀰󠀲󠁿 Flag for Burgas (BG-02) 🏴󠁢󠁦󠀱󠀰󠁿 Flag for Nord (BF-10) 🏴󠁢󠁧󠀰󠀴󠁿 Flag for Veliko Tarnovo (BG-04) 👨🏽‍👩🏽‍👧🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁧󠀰󠀷󠁿 Flag for Gabrovo (BG-07) 👨🏿‍👩🏿‍👶🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁧󠀰󠀸󠁿 Flag for Dobrich (BG-08) 🏴󠁢󠁦󠀱󠀲󠁿 Flag for Sahel (BF-12) 🏴󠁡󠁵󠁴󠁡󠁳󠁿 Flag for Tasmania (AU-TAS) 👨🏿‍❤️‍👩🏻 Couple With Heart - Man: Dark Skin Tone, Woman: Light Skin Tone 👩🏻‍👧🏻‍👦🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 👨🏻‍👩🏻‍👶🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👨🏼‍👩🏼‍👶🏼‍👧🏼 Family 
- Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏾‍❤️‍💋‍👩🏾 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁡󠁺󠁸󠁶󠁤󠁿 Flag for Khojavend (AZ-XVD) 🏴󠁢󠁧󠀱󠀱󠁿 Flag for Lovech (BG-11) 🏴󠁣󠁬󠁬󠁩󠁿 Flag for Libertador General Bernardo O’Higgins (CL-LI) 🏴󠁢󠁧󠀱󠀳󠁿 Flag for Pazardzhik (BG-13) 👨🏿‍❤️‍👩🏿 Couple With Heart - Man: Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁧󠀱󠀴󠁿 Flag for Pernik (BG-14) 🏴󠁢󠁧󠀱󠀰󠁿 Flag for Kyustendil (BG-10) 🏴󠁥󠁧󠁢󠁡󠁿 Flag for Red Sea (EG-BA) 🏴󠁴󠁺󠀱󠀱󠁿 Flag for Zanzibar Central/South (TZ-11) 👨🏿‍👩🏿‍👧🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁧󠀱󠀵󠁿 Flag for Pleven (BG-15) 👨🏿‍👨🏿‍👦🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 👨🏽‍👩🏽‍👶🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁧󠀲󠀱󠁿 Flag for Smolyan (BG-21) 🏴󠁢󠁧󠀰󠀱󠁿 Flag for Blagoevgrad (BG-01) 🏴󠁤󠁺󠀳󠀴󠁿 Flag for Bordj Bou Arréridj (DZ-34) 🏴󠁢󠁧󠀱󠀶󠁿 Flag for Plovdiv (BG-16) 🏴󠁣󠁩󠁶󠁢󠁿 Flag for Vallée du Bandama (CI-VB) 🏴󠁢󠁧󠀱󠀹󠁿 Flag for Silistra (BG-19) 👩‍❤️‍👨🏼 Couple With Heart - Woman, Man: Medium-Light Skin Tone 🏴󠁢󠁧󠀱󠀷󠁿 Flag for Razgrad (BG-17) 👨🏾‍❤️‍👨 Couple With Heart - Man: Medium-Dark Skin Tone, Man 🏴󠁡󠁯󠁣󠁮󠁮󠁿 Flag for Cunene (AO-CNN) 🏴󠁢󠁧󠀲󠀰󠁿 Flag for Sliven (BG-20) 🧕🏻‍♀️ Woman With Headscarf: Light Skin Tone 🏴󠁢󠁧󠀲󠀵󠁿 Flag for Targovishte (BG-25) 👩🏼‍👩🏼‍👶🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏾‍👩🏾‍👶🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁢󠁧󠀲󠀳󠁿 Flag for Sofia District (BG-23) 🏴󠁢󠁧󠀲󠀲󠁿 Flag for Sofia (BG-22) 👨🏿‍👩🏿‍👧🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👨🏻‍❤️‍💋‍👩🏾 Kiss - Man: Light Skin Tone, Woman: Medium-Dark Skin Tone 🧕🏽‍♀️ Woman With 
Headscarf: Medium Skin Tone 🏴󠁢󠁧󠀲󠀸󠁿 Flag for Yambol (BG-28) 🏴󠁢󠁨󠀱󠀳󠁿 Flag for Capital (BH-13) 🏴󠁢󠁧󠀲󠀶󠁿 Flag for Haskovo (BG-26) 🏴󠁬󠁩󠀰󠀷󠁿 Flag for Schaan (LI-07) 👨🏿‍👩🏿‍👶🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁨󠀱󠀵󠁿 Flag for Muharraq (BH-15) 🏴󠁢󠁨󠀱󠀴󠁿 Flag for Southern (BH-14) 🧕🏾‍♀️ Woman With Headscarf: Medium-Dark Skin Tone 🏴󠁲󠁯󠁳󠁢󠁿 Flag for Sibiu (RO-SB) 🧕🏼‍♀️ Woman With Headscarf: Medium-Light Skin Tone 👩🏻‍❤️‍👨🏿 Couple With Heart - Woman: Light Skin Tone, Man: Dark Skin Tone 🏴󠁢󠁨󠀱󠀷󠁿 Flag for Northern (BH-17) 🏴󠁢󠁩󠁢󠁢󠁿 Flag for Bubanza (BI-BB) 👩🏻‍❤️‍👩 Couple With Heart - Woman: Light Skin Tone, Woman 🏴󠁢󠁥󠁶󠁬󠁧󠁿 Flag for Flanders (BE-VLG) 👩🏽‍👧🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone 👨🏻‍👩🏻‍👶🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 🏴󠁢󠁩󠁢󠁭󠁿 Flag for Bujumbura (BI-BM) 🧕🏿‍♀️ Woman With Headscarf: Dark Skin Tone 🏴󠁢󠁩󠁢󠁬󠁿 Flag for Bujumbura Rural (BI-BL) 👨🏾‍❤️‍💋‍👩🏽 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium Skin Tone 👨🏼‍👩🏼‍👶🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏻‍👨🏻‍👦🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 🏴󠁢󠁩󠁣󠁡󠁿 Flag for Cankuzo (BI-CA) 🏴󠁢󠁧󠀱󠀲󠁿 Flag for Montana (BG-12) 🏴󠁬󠁶󠀰󠀸󠀵󠁿 Flag for Sala (LV-085) ⃣ Combining Enclosing Keycap 🏴󠁢󠁩󠁢󠁲󠁿 Flag for Bururi (BI-BR) 🏴󠁢󠁧󠀰󠀹󠁿 Flag for Kardzhali (BG-09) 🏴󠁢󠁩󠁲󠁭󠁿 Flag for Rumonge (BI-RM) 🏴󠁮󠁬󠁡󠁷󠁿 Flag for Aruba (NL-AW) 🏴󠁢󠁩󠁭󠁹󠁿 Flag for Muyinga (BI-MY) 🏴󠁢󠁩󠁲󠁴󠁿 Flag for Rutana (BI-RT) 🏴󠁢󠁩󠁲󠁹󠁿 Flag for Ruyigi (BI-RY) 🏴󠁢󠁩󠁫󠁩󠁿 Flag for Kirundo (BI-KI) 🏴󠁢󠁩󠁫󠁹󠁿 Flag for Kayanza (BI-KY) 🏴󠁢󠁩󠁭󠁷󠁿 Flag for Mwaro (BI-MW) 🏴󠁢󠁧󠀲󠀷󠁿 Flag for Shumen (BG-27) 🏴󠁢󠁩󠁮󠁧󠁿 Flag for Ngozi (BI-NG) 🏴󠁢󠁩󠁫󠁲󠁿 Flag for Karuzi (BI-KR) 🏴󠁢󠁩󠁭󠁵󠁿 Flag for Muramvya (BI-MU) 🏴󠁭󠁡󠀱󠀵󠁿 Flag for Laâyoune-Boujdour-Sakia El Hamra (MA-15) 👨🏽‍👩🏽‍👶🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: 
Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏾‍👩🏾‍👶🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁢󠁪󠁤󠁯󠁿 Flag for Donga (BJ-DO) 👩🏽‍👨🏽‍👶🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 👨🏽‍❤️‍💋‍👩🏼 Kiss - Man: Medium Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁦󠁲󠁨󠁤󠁦󠁿 Flag for Hauts-de-France (FR-HDF) 🏴󠁢󠁪󠁡󠁬󠁿 Flag for Alibori (BJ-AL) 🏴󠁢󠁪󠁡󠁫󠁿 Flag for Atakora (BJ-AK) 👨🏿‍👩🏿‍👶🏿‍👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁢󠁪󠁬󠁩󠁿 Flag for Littoral (BJ-LI) 🏴󠁢󠁪󠁢󠁯󠁿 Flag for Borgou (BJ-BO) 👩‍👩‍👧‍👶 Family: Woman, Woman, Girl, Baby 🏴󠁵󠁳󠁮󠁤󠁿 Flag for North Dakota (US-ND) 👨🏼‍❤️‍💋‍👨🏾 Kiss - Man: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁢󠁪󠁫󠁯󠁿 Flag for Kouffo (BJ-KO) 🏴󠁢󠁪󠁰󠁬󠁿 Flag for Plateau (BJ-PL) 🏴󠁧󠁤󠀱󠀰󠁿 Flag for Carriacou and Petite Martinique (GD-10) 🏴󠁢󠁪󠁺󠁯󠁿 Flag for Zou (BJ-ZO) 👩🏼‍❤️‍👨🏻 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Light Skin Tone 👩🏽‍❤️‍👨🏽 Couple With Heart - Woman: Medium Skin Tone, Man: Medium Skin Tone 👨🏽‍❤️‍👩🏼 Couple With Heart - Man: Medium Skin Tone, Woman: Medium-Light Skin Tone 👩🏽‍❤️‍👨🏻 Couple With Heart - Woman: Medium Skin Tone, Man: Light Skin Tone 🏴󠁬󠁢󠁢󠁩󠁿 Flag for Beqaa (LB-BI) 🏴󠁢󠁮󠁴󠁥󠁿 Flag for Temburong (BN-TE) 👩🏻‍👦🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁮󠁴󠁵󠁿 Flag for Tutong (BN-TU) 🏴󠁢󠁮󠁢󠁭󠁿 Flag for Brunei-Muara (BN-BM) 👨🏻‍👩🏻‍👦🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁧󠀰󠀶󠁿 Flag for Vratsa (BG-06) 👩🏽‍❤️‍👨🏼 Couple With Heart - Woman: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁯󠁢󠁿 Flag for Beni (BO-B) 🏴󠁢󠁮󠁢󠁥󠁿 Flag for Belait (BN-BE) 👩🏼‍❤️‍👨 Couple With Heart - Woman: Medium-Light Skin Tone, Man 🏴󠁢󠁪󠁯󠁵󠁿 Flag for Ouémé (BJ-OU) 👩🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, 
Boy: Medium-Light Skin Tone 🏴󠁳󠁣󠀲󠀵󠁿 Flag for Roche Caiman (SC-25) 👩🏻‍❤️‍👨🏾 Couple With Heart - Woman: Light Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁢󠁯󠁣󠁿 Flag for Cochabamba (BO-C) 👨🏾‍👩🏾‍👧🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁢󠁯󠁮󠁿 Flag for Pando (BO-N) 👩🏽‍❤️‍👩🏻 Couple With Heart - Woman: Medium Skin Tone, Woman: Light Skin Tone 👩🏾‍❤️‍👨🏽 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁯󠁨󠁿 Flag for Chuquisaca (BO-H) 🏴󠁢󠁯󠁬󠁿 Flag for La Paz (BO-L) 🏴󠁭󠁮󠀰󠀳󠀹󠁿 Flag for Khentii (MN-039) 🕴🏽‍♀️ Woman in Business Suit Levitating: Medium Skin Tone 🏴󠁭󠁫󠀲󠀷󠁿 Flag for Dolneni (MK-27) 🏴󠁢󠁧󠀲󠀴󠁿 Flag for Stara Zagora (BG-24) 👩🏽‍👦🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁩󠁲󠀱󠀳󠁿 Flag for Sistan and Baluchestan (IR-13) 👩🏾‍❤️‍👨🏼 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁯󠁰󠁿 Flag for Potosí (BO-P) 🏴󠁢󠁱󠁢󠁯󠁿 Flag for Bonaire (BQ-BO) 👩‍❤️‍💋‍👨🏻 Kiss - Woman, Man: Light Skin Tone 👩🏾‍❤️‍👨 Couple With Heart - Woman: Medium-Dark Skin Tone, Man 👩🏼‍👦🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁢󠁡󠁢󠁲󠁣󠁿 Flag for Brčko District (BA-BRC) 🏴󠁢󠁱󠁳󠁡󠁿 Flag for Saba (BQ-SA) 👩🏽‍❤️‍👨🏾 Couple With Heart - Woman: Medium Skin Tone, Man: Medium-Dark Skin Tone 👩🏾‍❤️‍👨🏿 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Dark Skin Tone 🏴󠁢󠁲󠁡󠁣󠁿 Flag for Acre (BR-AC) 🏴󠁢󠁩󠁧󠁩󠁿 Flag for Gitega (BI-GI) 👩🏿‍👦🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone 👩🏿‍❤️‍👨🏻 Couple With Heart - Woman: Dark Skin Tone, Man: Light Skin Tone 🏴󠁢󠁲󠁡󠁭󠁿 Flag for Amazonas (BR-AM) 🏴󠁡󠁲󠁣󠁿 Flag for Buenos Aires (AR-C) 👨🏼‍👩🏼‍👧🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏼‍❤️‍💋‍👨🏼 Kiss - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁲󠁥󠁳󠁿 Flag for Espírito Santo (BR-ES) 👨🏿‍❤️‍💋‍👨🏾 Kiss - Man: Dark Skin Tone, Man: Medium-Dark Skin 
Tone 👨🏼‍❤️‍💋‍👨🏽 Kiss - Man: Medium-Light Skin Tone, Man: Medium Skin Tone 👩🏾‍👦🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨🏻‍❤️‍👩 Couple With Heart - Man: Light Skin Tone, Woman 👨🏿‍❤️‍💋‍👩🏾 Kiss - Man: Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩🏻‍❤️‍💋‍👩🏽 Kiss - Woman: Light Skin Tone, Woman: Medium Skin Tone 👨🏼‍❤️‍💋‍👨🏿 Kiss - Man: Medium-Light Skin Tone, Man: Dark Skin Tone 👩🏽‍👦🏽‍👦🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏿‍❤️‍👩🏼 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁢󠁲󠁭󠁡󠁿 Flag for Maranhão (BR-MA) 👩🏿‍❤️‍👩🏽 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium Skin Tone 👩🏿‍❤️‍👩 Couple With Heart - Woman: Dark Skin Tone, Woman 🏴󠁢󠁲󠁡󠁰󠁿 Flag for Amapá (BR-AP) 👨🏽‍❤️‍👨🏻 Couple With Heart - Man: Medium Skin Tone, Man: Light Skin Tone 👩🏻‍❤️‍💋‍👨🏻 Kiss - Woman: Light Skin Tone, Man: Light Skin Tone 👨🏽‍❤️‍💋‍👨🏽 Kiss - Man: Medium Skin Tone, Man: Medium Skin Tone 👩🏿‍❤️‍💋‍👩🏻 Kiss - Woman: Dark Skin Tone, Woman: Light Skin Tone 👨🏽‍❤️‍💋‍👩🏿 Kiss - Man: Medium Skin Tone, Woman: Dark Skin Tone 👩🏼‍❤️‍💋‍👨🏾 Kiss - Woman: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏿‍❤️‍💋‍👨🏼 Kiss - Man: Dark Skin Tone, Man: Medium-Light Skin Tone 👨🏾‍❤️‍💋‍👨🏿 Kiss - Man: Medium-Dark Skin Tone, Man: Dark Skin Tone 👩🏽‍❤️‍💋‍👩🏿 Kiss - Woman: Medium Skin Tone, Woman: Dark Skin Tone 👩🏼‍❤️‍💋‍👨🏿 Kiss - Woman: Medium-Light Skin Tone, Man: Dark Skin Tone 👨🏽‍❤️‍💋‍👩🏽 Kiss - Man: Medium Skin Tone, Woman: Medium Skin Tone 👨🏾‍❤️‍💋‍👨🏼 Kiss - Man: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 👨🏽‍❤️‍💋‍👩🏻 Kiss - Man: Medium Skin Tone, Woman: Light Skin Tone 👨🏾‍❤️‍💋‍👨 Kiss - Man: Medium-Dark Skin Tone, Man 👨🏾‍❤️‍💋‍👨🏾 Kiss - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👩‍❤️‍💋‍👨🏾 Kiss - Woman, Man: Medium-Dark Skin Tone 👩‍❤️‍💋‍👩🏻 Kiss - Woman, Woman: Light Skin Tone 👩🏽‍❤️‍💋‍👨🏻 Kiss - Woman: Medium Skin Tone, Man: Light Skin Tone 👩🏿‍❤️‍💋‍👨🏿 Kiss - Woman: Dark Skin Tone, 
Man: Dark Skin Tone 👩🏻‍❤️‍💋‍👩🏿 Kiss - Woman: Light Skin Tone, Woman: Dark Skin Tone 👩🏻‍❤️‍💋‍👩🏼 Kiss - Woman: Light Skin Tone, Woman: Medium-Light Skin Tone 👩🏾‍❤️‍💋‍👩🏿 Kiss - Woman: Medium-Dark Skin Tone, Woman: Dark Skin Tone 👩🏾‍❤️‍💋‍👩 Kiss - Woman: Medium-Dark Skin Tone, Woman 👩🏾‍❤️‍💋‍👩🏻 Kiss - Woman: Medium-Dark Skin Tone, Woman: Light Skin Tone 👩🏻‍❤️‍👨 Couple With Heart - Woman: Light Skin Tone, Man 👩🏻‍👩🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone 👩🏾‍❤️‍💋‍👨🏾 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👨🏻‍❤️‍👨🏽 Couple With Heart - Man: Light Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁲󠁭󠁴󠁿 Flag for Mato Grosso (BR-MT) 👨🏽‍❤️‍👩🏻 Couple With Heart - Man: Medium Skin Tone, Woman: Light Skin Tone 👨‍❤️‍👨🏿 Couple With Heart - Man, Man: Dark Skin Tone 👩🏿‍❤️‍💋‍👨🏼 Kiss - Woman: Dark Skin Tone, Man: Medium-Light Skin Tone 👩🏿‍❤️‍💋‍👩🏾 Kiss - Woman: Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩🏻‍👦🏻‍👧🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁯󠁳󠁿 Flag for Santa Cruz (BO-S) 👨🏻‍❤️‍👩🏽 Couple With Heart - Man: Light Skin Tone, Woman: Medium Skin Tone 👨🏽‍❤️‍👩🏽 Couple With Heart - Man: Medium Skin Tone, Woman: Medium Skin Tone 👩🏾‍❤️‍💋‍👩🏽 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁪󠁣󠁯󠁿 Flag for Collines (BJ-CO) 👨🏻‍👩🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone 👨‍❤️‍👨🏽 Couple With Heart - Man, Man: Medium Skin Tone 👨🏾‍👩🏾‍👦🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏼‍❤️‍👨 Couple With Heart - Man: Medium-Light Skin Tone, Man 👨🏾‍❤️‍👩🏽 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁲󠁰󠁡󠁿 Flag for Pará (BR-PA) 👩🏽‍👦🏽‍👧🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👨🏼‍❤️‍👨🏼 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 👨🏿‍❤️‍👨🏻 Couple With Heart - Man: Dark Skin Tone, Man: 
Light Skin Tone 👩🏽‍❤️‍👩🏽 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium Skin Tone 👨🏾‍❤️‍👨🏽 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium Skin Tone 👨🏽‍❤️‍👨🏽 Couple With Heart - Man: Medium Skin Tone, Man: Medium Skin Tone 👨🏻‍❤️‍👩🏼 Couple With Heart - Man: Light Skin Tone, Woman: Medium-Light Skin Tone 👨🏾‍❤️‍👩🏿 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Dark Skin Tone 👨🏾‍❤️‍👨🏼 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 👩🏾‍❤️‍💋‍👩🏾 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩🏼‍❤️‍👩🏻 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Light Skin Tone 👨🏿‍❤️‍👩🏼 Couple With Heart - Man: Dark Skin Tone, Woman: Medium-Light Skin Tone 👨🏼‍❤️‍👨🏾 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏽‍❤️‍👨🏾 Couple With Heart - Man: Medium Skin Tone, Man: Medium-Dark Skin Tone 👩‍❤️‍👨🏾 Couple With Heart - Woman, Man: Medium-Dark Skin Tone 🏴󠁢󠁲󠁡󠁬󠁿 Flag for Alagoas (BR-AL) 👩‍❤️‍👨🏻 Couple With Heart - Woman, Man: Light Skin Tone 🏴󠁢󠁦󠀰󠀹󠁿 Flag for Hauts-Bassins (BF-09) 👨🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏾‍❤️‍👩🏾 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁢󠁲󠁲󠁪󠁿 Flag for Rio de Janeiro (BR-RJ) 👨🏾‍❤️‍💋‍👩🏻 Kiss - Man: Medium-Dark Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁲󠁲󠁯󠁿 Flag for Rondônia (BR-RO) 👨🏾‍❤️‍👨🏿 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Dark Skin Tone 👨🏽‍👦🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone 👨🏼‍❤️‍👨🏽 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁲󠁰󠁩󠁿 Flag for Piauí (BR-PI) 👨🏽‍👩🏽‍👦🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁢󠁲󠁲󠁮󠁿 Flag for Rio Grande do Norte (BR-RN) 👩🏻‍❤️‍👨🏻 Couple With Heart - Woman: Light Skin Tone, Man: Light Skin Tone 👨🏻‍👦🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍❤️‍👩🏾 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: 
Medium-Dark Skin Tone 👨🏿‍❤️‍👩🏾 Couple With Heart - Man: Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁢󠁲󠁳󠁥󠁿 Flag for Sergipe (BR-SE) 🏴󠁢󠁲󠁰󠁲󠁿 Flag for Paraná (BR-PR) 👨🏿‍👦🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone 👩🏼‍❤️‍👩🏽 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium Skin Tone 👩🏾‍❤️‍👩🏼 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁲󠁵󠁭󠁯󠁳󠁿 Flag for Moscow Province (RU-MOS) 👩🏽‍❤️‍💋‍👩🏽 Kiss - Woman: Medium Skin Tone, Woman: Medium Skin Tone 👩🏿‍👦🏿‍👦🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁲󠁳󠁰󠁿 Flag for São Paulo (BR-SP) 🏴󠁩󠁲󠀰󠀱󠁿 Flag for East Azerbaijan (IR-01) 🏴󠁢󠁲󠁲󠁳󠁿 Flag for Rio Grande do Sul (BR-RS) 👩🏼‍❤️‍👨🏿 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Dark Skin Tone 🏴󠁮󠁯󠀱󠀴󠁿 Flag for Sogn og Fjordane (NO-14) 🏴󠁢󠁲󠁴󠁯󠁿 Flag for Tocantins (BR-TO) 🏴󠁳󠁩󠀱󠀸󠀲󠁿 Flag for Sveti Andraž v Slovenskih Goricah (SI-182) 👨🏼‍❤️‍👩🏻 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Light Skin Tone 👨🏿‍❤️‍👨🏽 Couple With Heart - Man: Dark Skin Tone, Man: Medium Skin Tone 👨🏽‍👦🏽‍👦🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👨🏿‍👦🏿‍👦🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁳󠁢󠁩󠁿 Flag for Bimini (BS-BI) 👨🏿‍❤️‍👩 Couple With Heart - Man: Dark Skin Tone, Woman 👩🏻‍👦🏻‍👦🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁲󠁲󠁲󠁿 Flag for Roraima (BR-RR) 🏴󠁢󠁯󠁯󠁿 Flag for Oruro (BO-O) 🏴󠁢󠁳󠁥󠁸󠁿 Flag for Exuma (BS-EX) 👨🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏽‍❤️‍👨 Couple With Heart - Woman: Medium Skin Tone, Man 🏴󠁢󠁳󠁣󠁥󠁿 Flag for Central Eleuthera (BS-CE) 🏴󠁢󠁳󠁢󠁹󠁿 Flag for Berry Islands (BS-BY) 🏴󠁢󠁩󠁭󠁡󠁿 Flag for Makamba (BI-MA) 🏴󠁢󠁲󠁤󠁦󠁿 Flag for Federal District (BR-DF) 👩🏻‍❤️‍👩🏾 Couple With Heart - Woman: Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏼‍❤️‍💋‍👩🏼 Kiss - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁢󠁳󠁣󠁯󠁿 Flag for Central Abaco (BS-CO) 🏴󠁢󠁳󠁥󠁧󠁿 Flag for East Grand 
Bahama (BS-EG) 🏴󠁢󠁳󠁣󠁳󠁿 Flag for Central Andros (BS-CS) 👨🏻‍👦🏻‍👧🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁳󠁣󠁫󠁿 Flag for Crooked Island (BS-CK) 🏴󠁢󠁳󠁢󠁰󠁿 Flag for Black Point (BS-BP) 👨🏼‍👦🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👦🏽‍👧🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👩🏿‍❤️‍👨🏾 Couple With Heart - Woman: Dark Skin Tone, Man: Medium-Dark Skin Tone 👩🏾‍❤️‍💋‍👨🏼 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁳󠁮󠁥󠁿 Flag for North Eleuthera (BS-NE) 🏴󠁢󠁳󠁮󠁯󠁿 Flag for North Abaco (BS-NO) 🏴󠁢󠁳󠁭󠁧󠁿 Flag for Mayaguana (BS-MG) 👨🏾‍👦🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏼‍❤️‍💋‍👩🏻 Kiss - Man: Medium-Light Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁳󠁧󠁣󠁿 Flag for Grand Cay (BS-GC) 🏴󠁢󠁳󠁦󠁰󠁿 Flag for Freeport (BS-FP) 🏴󠁢󠁳󠁩󠁮󠁿 Flag for Inagua (BS-IN) 🏴󠁢󠁳󠁨󠁴󠁿 Flag for Hope Town (BS-HT) 👩🏾‍❤️‍👩🏿 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁳󠁬󠁩󠁿 Flag for Long Island (BS-LI) 👨🏿‍👦🏿‍👧🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 👨🏾‍❤️‍👩 Couple With Heart - Man: Medium-Dark Skin Tone, Woman 👩🏿‍❤️‍👨🏿 Couple With Heart - Woman: Dark Skin Tone, Man: Dark Skin Tone 👨🏻‍👦🏻‍👶🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👨‍👨‍👶 Family: Man, Man, Baby 👩‍👧‍👶 Family: Woman, Girl, Baby 👨‍👦‍👶 Family: Man, Boy, Baby 👨‍👨‍👶‍👦 Family: Man, Man, Baby, Boy 👨‍👦‍👧 Family: Man, Boy, Girl 👨‍👶‍👶 Family: Man, Baby, Baby 🏴󠁢󠁳󠁲󠁩󠁿 Flag for Ragged Island (BS-RI) 👩🏿‍❤️‍👩🏿 Couple With Heart - Woman: Dark Skin Tone, Woman: Dark Skin Tone 👩🏿‍❤️‍👨🏽 Couple With Heart - Woman: Dark Skin Tone, Man: Medium Skin Tone 👩🏼‍❤️‍👨🏼 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁳󠁮󠁳󠁿 Flag for North Andros (BS-NS) 👩🏿‍❤️‍👩🏻 Couple With Heart - Woman: Dark Skin Tone, Woman: Light Skin Tone 👨🏻‍❤️‍💋‍👨 Kiss - Man: Light Skin Tone, 
Man 🏴󠁢󠁳󠁳󠁡󠁿 Flag for South Andros (BS-SA) 👨🏻‍❤️‍💋‍👨🏼 Kiss - Man: Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁳󠁳󠁥󠁿 Flag for South Eleuthera (BS-SE) 👨🏼‍👦🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏻‍❤️‍💋‍👩🏻 Kiss - Man: Light Skin Tone, Woman: Light Skin Tone 👨🏼‍❤️‍💋‍👩🏾 Kiss - Man: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏾‍❤️‍💋‍👩🏼 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 👨🏾‍❤️‍💋‍👨🏻 Kiss - Man: Medium-Dark Skin Tone, Man: Light Skin Tone 🏴󠁢󠁲󠁳󠁣󠁿 Flag for Santa Catarina (BR-SC) 👩‍👩‍👦‍👧 Family: Woman, Woman, Boy, Girl 👨‍❤️‍💋‍👩🏾 Kiss - Man, Woman: Medium-Dark Skin Tone 🏴󠁢󠁳󠁲󠁣󠁿 Flag for Rum Cay (BS-RC) 👩‍👩‍👶‍👦 Family: Woman, Woman, Baby, Boy 👨🏻‍❤️‍💋‍👩🏽 Kiss - Man: Light Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁳󠁣󠁩󠁿 Flag for Cat Island (BS-CI) 👩🏽‍❤️‍👩 Couple With Heart - Woman: Medium Skin Tone, Woman 👨🏽‍👦🏽‍👶🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👩‍👨‍👦‍👶 Family: Woman, Man, Boy, Baby 👨🏾‍❤️‍💋‍👩 Kiss - Man: Medium-Dark Skin Tone, Woman 👨‍❤️‍💋‍👨🏻 Kiss - Man, Man: Light Skin Tone 👨🏻‍❤️‍💋‍👨🏿 Kiss - Man: Light Skin Tone, Man: Dark Skin Tone 👨🏼‍❤️‍💋‍👩🏽 Kiss - Man: Medium-Light Skin Tone, Woman: Medium Skin Tone 👨🏾‍👦🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁢󠁳󠁳󠁯󠁿 Flag for South Abaco (BS-SO) 👩🏾‍❤️‍💋‍👩🏼 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 👨🏻‍❤️‍👨🏿 Couple With Heart - Man: Light Skin Tone, Man: Dark Skin Tone 👨🏿‍❤️‍💋‍👨🏿 Kiss - Man: Dark Skin Tone, Man: Dark Skin Tone 👩🏾‍❤️‍💋‍👨🏿 Kiss - Woman: Medium-Dark Skin Tone, Man: Dark Skin Tone 👩🏼‍❤️‍💋‍👨🏽 Kiss - Woman: Medium-Light Skin Tone, Man: Medium Skin Tone 👩🏾‍❤️‍💋‍👨🏻 Kiss - Woman: Medium-Dark Skin Tone, Man: Light Skin Tone 👩🏽‍❤️‍💋‍👨 Kiss - Woman: Medium Skin Tone, Man 👨‍👧‍👶 Family: Man, Girl, Baby 👩🏻‍❤️‍💋‍👨🏾 Kiss - Woman: Light Skin Tone, Man: Medium-Dark Skin Tone 👨‍❤️‍👨🏼 Couple With Heart - Man, Man: Medium-Light 
Skin Tone 👩🏼‍❤️‍💋‍👩🏼 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 👨🏿‍❤️‍💋‍👩🏿 Kiss - Man: Dark Skin Tone, Woman: Dark Skin Tone 👨‍❤️‍💋‍👩🏼 Kiss - Man, Woman: Medium-Light Skin Tone 🏴󠁣󠁩󠁡󠁢󠁿 Flag for Abidjan (CI-AB) 👩🏻‍❤️‍💋‍👨 Kiss - Woman: Light Skin Tone, Man 👩🏼‍❤️‍💋‍👩🏾 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏻‍❤️‍💋‍👩🏼 Kiss - Man: Light Skin Tone, Woman: Medium-Light Skin Tone 👩🏽‍❤️‍💋‍👨🏿 Kiss - Woman: Medium Skin Tone, Man: Dark Skin Tone 👩🏿‍❤️‍💋‍👩🏼 Kiss - Woman: Dark Skin Tone, Woman: Medium-Light Skin Tone 👩🏿‍❤️‍💋‍👨🏾 Kiss - Woman: Dark Skin Tone, Man: Medium-Dark Skin Tone 👩🏼‍❤️‍💋‍👨 Kiss - Woman: Medium-Light Skin Tone, Man 👩‍❤️‍👩🏾 Couple With Heart - Woman, Woman: Medium-Dark Skin Tone 👨🏿‍❤️‍👨🏼 Couple With Heart - Man: Dark Skin Tone, Man: Medium-Light Skin Tone 👨🏿‍👦🏿‍👶🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 👨🏼‍❤️‍👩🏼 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 👩🏼‍❤️‍👨🏽 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁳󠁳󠁷󠁿 Flag for Spanish Wells (BS-SW) 👨🏿‍❤️‍👨🏿 Couple With Heart - Man: Dark Skin Tone, Man: Dark Skin Tone 👨🏼‍❤️‍👨🏿 Couple With Heart - Man: Medium-Light Skin Tone, Man: Dark Skin Tone 👨🏼‍❤️‍👩 Couple With Heart - Man: Medium-Light Skin Tone, Woman 👩🏼‍❤️‍👩🏼 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 👨🏼‍❤️‍👨🏻 Couple With Heart - Man: Medium-Light Skin Tone, Man: Light Skin Tone 👨🏾‍❤️‍👨🏾 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👩‍❤️‍👩🏼 Couple With Heart - Woman, Woman: Medium-Light Skin Tone 👨🏼‍❤️‍👩🏿 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Dark Skin Tone 👨🏻‍❤️‍👨🏾 Couple With Heart - Man: Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏽‍❤️‍👩🏾 Couple With Heart - Man: Medium Skin Tone, Woman: Medium-Dark Skin Tone 👩‍❤️‍👩🏿 Couple With Heart - Woman, Woman: Dark Skin Tone 👨🏽‍❤️‍👨🏿 Couple With Heart - Man: Medium Skin Tone, Man: Dark 
Skin Tone 👨‍👨‍👦‍👶 Family: Man, Man, Boy, Baby 👨🏿‍❤️‍👨 Couple With Heart - Man: Dark Skin Tone, Man 👩🏻‍❤️‍👩🏿 Couple With Heart - Woman: Light Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁳󠁳󠁳󠁿 Flag for San Salvador (BS-SS) 🏴󠁢󠁴󠀱󠀴󠁿 Flag for Samtse (BT-14) 👩🏻‍❤️‍👨🏽 Couple With Heart - Woman: Light Skin Tone, Man: Medium Skin Tone 👩🏼‍❤️‍👩🏿 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Dark Skin Tone 👨‍❤️‍👩🏿 Couple With Heart - Man, Woman: Dark Skin Tone 🏴󠁢󠁴󠀱󠀱󠁿 Flag for Paro (BT-11) 👨🏻‍❤️‍👩🏾 Couple With Heart - Man: Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁢󠁴󠀱󠀵󠁿 Flag for Thimphu (BT-15) 👩🏾‍❤️‍👩🏽 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁳󠁷󠁧󠁿 Flag for West Grand Bahama (BS-WG) 🏴󠁢󠁴󠀱󠀳󠁿 Flag for Haa (BT-13) 🏴󠁢󠁴󠀱󠀲󠁿 Flag for Chukha (BT-12) 👨🏻‍❤️‍💋‍👨🏽 Kiss - Man: Light Skin Tone, Man: Medium Skin Tone 👨🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏽‍👧🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁳󠁡󠁫󠁿 Flag for Acklins (BS-AK) 🏴󠁢󠁴󠀳󠀲󠁿 Flag for Trongsa (BT-32) 🏴󠁢󠁴󠀴󠀱󠁿 Flag for Trashigang (BT-41) 🏴󠁢󠁴󠀲󠀳󠁿 Flag for Punakha (BT-23) 🏴󠁢󠁴󠀲󠀴󠁿 Flag for Wangdue Phodrang (BT-24) 🏴󠁢󠁴󠀳󠀳󠁿 Flag for Bumthang (BT-33) 🏴󠁢󠁴󠀳󠀴󠁿 Flag for Zhemgang (BT-34) 👩🏼‍❤️‍💋‍👨🏼 Kiss - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁴󠀴󠀲󠁿 Flag for Mongar (BT-42) 🏴󠁢󠁲󠁰󠁢󠁿 Flag for Paraíba (BR-PB) 👩🏿‍❤️‍👨🏼 Couple With Heart - Woman: Dark Skin Tone, Man: Medium-Light Skin Tone 🏴󠁣󠁨󠁺󠁨󠁿 Flag for Zürich (CH-ZH) 🏴󠁢󠁴󠀳󠀱󠁿 Flag for Sarpang (BT-31) 🏴󠁢󠁴󠀲󠀲󠁿 Flag for Dagana (BT-22) 👩🏻‍❤️‍💋‍👨🏽 Kiss - Woman: Light Skin Tone, Man: Medium Skin Tone 👨🏿‍👨🏿‍👧🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁷󠁣󠁥󠁿 Flag for Central (BW-CE) 🏴󠁢󠁴󠁧󠁡󠁿 Flag for Gasa (BT-GA) 🏴󠁢󠁷󠁣󠁨󠁿 Flag for Chobe (BW-CH) 🏴󠁢󠁴󠀴󠀵󠁿 Flag for Samdrup Jongkhar (BT-45) 🏴󠁢󠁷󠁦󠁲󠁿 Flag for Francistown (BW-FR) 🏴󠁢󠁴󠀴󠀴󠁿 Flag for Lhuntse (BT-44) 🏴󠁢󠁴󠁴󠁹󠁿 Flag for Trashiyangtse 
(BT-TY) 🏴󠁢󠁴󠀲󠀱󠁿 Flag for Tsirang (BT-21) 🏴󠁢󠁴󠀴󠀳󠁿 Flag for Pemagatshel (BT-43) 👨🏿‍👧🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁷󠁮󠁥󠁿 Flag for North East (BW-NE) 🏴󠁢󠁷󠁫󠁬󠁿 Flag for Kgatleng (BW-KL) 🏴󠁢󠁷󠁫󠁧󠁿 Flag for Kgalagadi (BW-KG) 🏴󠁢󠁷󠁳󠁥󠁿 Flag for South East (BW-SE) 🏴󠁢󠁷󠁫󠁷󠁿 Flag for Kweneng (BW-KW) 👨🏻‍👧🏻‍👦🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁷󠁮󠁷󠁿 Flag for North West (BW-NW) 🏴󠁢󠁷󠁪󠁷󠁿 Flag for Jwaneng (BW-JW) 🏴󠁢󠁳󠁭󠁣󠁿 Flag for Mangrove Cay (BS-MC) 👩🏼‍❤️‍💋‍👩🏿 Kiss - Woman: Medium-Light Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁷󠁧󠁨󠁿 Flag for Ghanzi (BW-GH) 👨🏻‍❤️‍👩🏻 Couple With Heart - Man: Light Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁪󠁡󠁱󠁿 Flag for Atlantique (BJ-AQ) 👨🏼‍👧🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👨🏾‍👧🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨🏿‍👧🏿‍👦🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁷󠁳󠁯󠁿 Flag for Southern (BW-SO) 👨🏽‍👧🏽‍👦🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍❤️‍👩 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman 👨‍👩‍👶‍👧 Family: Man, Woman, Baby, Girl 👨🏽‍❤️‍💋‍👨🏾 Kiss - Man: Medium Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁢󠁷󠁳󠁴󠁿 Flag for Sowa Town (BW-ST) 🏴󠁢󠁷󠁳󠁰󠁿 Flag for Selibe Phikwe (BW-SP) 👩🏿‍❤️‍👩🏾 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩‍👨‍👦‍👦 Family: Woman, Man, Boy, Boy 👩🏿‍👨🏿‍👦🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁢󠁹󠁨󠁭󠁿 Flag for Minsk (BY-HM) 🏴󠁢󠁹󠁨󠁯󠁿 Flag for Homel (BY-HO) 👨🏻‍👦🏻‍👦🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 👨🏻‍👩🏻‍👧🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁴󠁲󠀳󠀵󠁿 Flag for Izmir (TR-35) 🏴󠁢󠁹󠁨󠁲󠁿 Flag for Hrodna (BY-HR) 🏴󠁢󠁹󠁭󠁡󠁿 Flag for Magileu (BY-MA) 🏴󠁢󠁹󠁭󠁩󠁿 Flag for Minsk Region (BY-MI) 👨🏼‍❤️‍💋‍👩🏿 Kiss - Man: Medium-Light 
Skin Tone, Woman: Dark Skin Tone 👨🏾‍❤️‍👩🏻 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁺󠁢󠁺󠁿 Flag for Belize (BZ-BZ) 🏴󠁢󠁷󠁬󠁯󠁿 Flag for Lobatse (BW-LO) 👩‍👦‍👧 Family: Woman, Boy, Girl 👨🏼‍👧🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁢󠁳󠁭󠁩󠁿 Flag for Moore’s Island (BS-MI) 🏴󠁢󠁪󠁭󠁯󠁿 Flag for Mono (BJ-MO) 👨🏽‍👧🏽‍👧🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁹󠁶󠁩󠁿 Flag for Vitebsk (BY-VI) 🏴󠁢󠁺󠁳󠁣󠁿 Flag for Stann Creek (BZ-SC) 👨🏾‍👧🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁢󠁺󠁣󠁺󠁬󠁿 Flag for Corozal (BZ-CZL) 👨🏻‍👧🏻‍👶🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👨🏿‍👧🏿‍👧🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁺󠁴󠁯󠁬󠁿 Flag for Toledo (BZ-TOL) 🏴󠁮󠁰󠀵󠁿 Flag for Sudur Pashchimanchal (NP-5) 🏴󠁢󠁳󠁨󠁩󠁿 Flag for Harbour Island (BS-HI) 🏴󠁣󠁡󠁡󠁢󠁿 Flag for Alberta (CA-AB) 👩🏾‍❤️‍👨🏾 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👨🏽‍❤️‍💋‍👨🏼 Kiss - Man: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁬󠁡󠁶󠁩󠁿 Flag for Vientiane Province (LA-VI) 👨‍👩‍👦‍👧 Family: Man, Woman, Boy, Girl 👨🏻‍👧🏻‍👧🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 👨🏼‍👧🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏽‍👧🏽‍👶🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁣󠁡󠁰󠁥󠁿 Flag for Prince Edward Island (CA-PE) 🏴󠁣󠁤󠁫󠁧󠁿 Flag for Kwango (CD-KG) 🏴󠁣󠁡󠁮󠁳󠁿 Flag for Nova Scotia (CA-NS) 👨🏾‍👧🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁣󠁤󠁨󠁵󠁿 Flag for Haut-Uélé (CD-HU) 🏴󠁣󠁤󠁢󠁣󠁿 Flag for Bas-Congo (CD-BC) 🏴󠁣󠁤󠁳󠁵󠁿 Flag for Sud-Ubangi (CD-SU) 🏴󠁣󠁤󠁭󠁡󠁿 Flag for Maniema (CD-MA) 🏴󠁣󠁤󠁳󠁡󠁿 Flag for Sankuru (CD-SA) 🏴󠁣󠁤󠁴󠁵󠁿 Flag for Tshuapa (CD-TU) 🏴󠁣󠁡󠁹󠁴󠁿 Flag for Yukon (CA-YT) 🏴󠁣󠁤󠁭󠁯󠁿 Flag for Mongala (CD-MO) 🏴󠁣󠁦󠁢󠁢󠁿 Flag for 
Bamingui-Bangoran (CF-BB) 🏴󠁣󠁤󠁭󠁮󠁿 Flag for Mai-Ndombe (CD-MN) 🏴󠁣󠁡󠁮󠁵󠁿 Flag for Nunavut (CA-NU) 🏴󠁣󠁤󠁫󠁬󠁿 Flag for Kwilu (CD-KL) 🏴󠁣󠁡󠁮󠁢󠁿 Flag for New Brunswick (CA-NB) 🏴󠁣󠁦󠁢󠁧󠁦󠁿 Flag for Bangui (CF-BGF) 🏴󠁣󠁤󠁫󠁮󠁿 Flag for Kinshasa (CD-KN) 🏴󠁣󠁤󠁮󠁫󠁿 Flag for North Kivu (CD-NK) 🏴󠁣󠁡󠁮󠁴󠁿 Flag for Northwest Territories (CA-NT) 🏴󠁣󠁤󠁴󠁯󠁿 Flag for Tshopo (CD-TO) 🏴󠁣󠁤󠁢󠁵󠁿 Flag for Bas-Uélé (CD-BU) 🏴󠁣󠁤󠁨󠁬󠁿 Flag for Haut-Lomami (CD-HL) 🏴󠁣󠁤󠁨󠁫󠁿 Flag for Haut-Katanga (CD-HK) 🏴󠁣󠁤󠁫󠁥󠁿 Flag for Kasaï-Oriental (CD-KE) 🏴󠁣󠁤󠁳󠁫󠁿 Flag for South Kivu (CD-SK) 🏴󠁣󠁡󠁯󠁮󠁿 Flag for Ontario (CA-ON) 🏴󠁣󠁦󠁡󠁣󠁿 Flag for Ouham (CF-AC) 🏴󠁣󠁦󠁨󠁳󠁿 Flag for Mambéré-Kadéï (CF-HS) 🏴󠁣󠁤󠁫󠁣󠁿 Flag for Kasaï Central (CD-KC) 🏴󠁣󠁤󠁮󠁵󠁿 Flag for Nord-Ubangi (CD-NU) 🏴󠁣󠁤󠁫󠁳󠁿 Flag for Kasaï (CD-KS) 🏴󠁣󠁤󠁩󠁴󠁿 Flag for Ituri (CD-IT) 🏴󠁣󠁨󠁢󠁥󠁿 Flag for Bern (CH-BE) 🏴󠁣󠁧󠀲󠁿 Flag for Lékoumou (CG-2) 🏴󠁣󠁨󠁡󠁩󠁿 Flag for Appenzell Innerrhoden (CH-AI) 🏴󠁣󠁦󠁭󠁰󠁿 Flag for Ombella-M’Poko (CF-MP) 👨🏻‍👶🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone 🏴󠁣󠁦󠁫󠁧󠁿 Flag for Kémo (CF-KG) 🏴󠁣󠁧󠀱󠀳󠁿 Flag for Sangha (CG-13) 🏴󠁣󠁨󠁬󠁵󠁿 Flag for Lucerne (CH-LU) 🏴󠁣󠁨󠁧󠁥󠁿 Flag for Geneva (CH-GE) 🏴󠁣󠁨󠁮󠁷󠁿 Flag for Nidwalden (CH-NW) 🏴󠁣󠁧󠀵󠁿 Flag for Kouilou (CG-5) 🏴󠁣󠁧󠀷󠁿 Flag for Likouala (CG-7) 🏴󠁣󠁧󠁢󠁺󠁶󠁿 Flag for Brazzaville (CG-BZV) 🏴󠁣󠁨󠁳󠁨󠁿 Flag for Schaffhausen (CH-SH) 🏴󠁣󠁤󠁬󠁯󠁿 Flag for Lomami (CD-LO) 🏴󠁣󠁨󠁡󠁲󠁿 Flag for Appenzell Ausserrhoden (CH-AR) 🏴󠁣󠁨󠁳󠁺󠁿 Flag for Schwyz (CH-SZ) 🏴󠁣󠁨󠁮󠁥󠁿 Flag for Neuchâtel (CH-NE) 🏴󠁣󠁦󠁯󠁰󠁿 Flag for Ouham-Pendé (CF-OP) 🏴󠁣󠁨󠁧󠁲󠁿 Flag for Graubünden (CH-GR) 🏴󠁣󠁨󠁳󠁯󠁿 Flag for Solothurn (CH-SO) 🏴󠁣󠁨󠁦󠁲󠁿 Flag for Fribourg (CH-FR) 🏴󠁣󠁧󠀱󠀴󠁿 Flag for Plateaux (CG-14) 🏴󠁣󠁦󠁳󠁥󠁿 Flag for Sangha-Mbaéré (CF-SE) 👨🏿‍👧🏿‍👶🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁣󠁨󠁡󠁧󠁿 Flag for Aargau (CH-AG) 🏴󠁣󠁧󠀱󠀵󠁿 Flag for Cuvette-Ouest (CG-15) 🏴󠁣󠁨󠁳󠁧󠁿 Flag for St. 
Gallen (CH-SG) 🏴󠁣󠁧󠀸󠁿 Flag for Cuvette (CG-8) 🏴󠁣󠁨󠁯󠁷󠁿 Flag for Obwalden (CH-OW) 🏴󠁣󠁨󠁢󠁳󠁿 Flag for Basel-Stadt (CH-BS) 🏴󠁣󠁦󠁬󠁢󠁿 Flag for Lobaye (CF-LB) 🏴󠁣󠁬󠁶󠁳󠁿 Flag for Valparaíso (CL-VS) 🏴󠁣󠁭󠁮󠁷󠁿 Flag for Northwest (CM-NW) 🏴󠁣󠁩󠁤󠁮󠁿 Flag for Denguélé (CI-DN) 🏴󠁣󠁭󠁮󠁯󠁿 Flag for North (CM-NO) 🏴󠁣󠁩󠁹󠁭󠁿 Flag for Yamoussoukro (CI-YM) 🏴󠁣󠁭󠁥󠁳󠁿 Flag for East (CM-ES) 👨🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁣󠁩󠁷󠁲󠁿 Flag for Woroba (CI-WR) 🏴󠁣󠁩󠁬󠁧󠁿 Flag for Lagunes (CI-LG) 🏴󠁣󠁩󠁧󠁤󠁿 Flag for Gôh-Djiboua (CI-GD) 🏴󠁣󠁩󠁣󠁭󠁿 Flag for Comoé (CI-CM) 🏴󠁣󠁭󠁳󠁷󠁿 Flag for Southwest (CM-SW) 🏴󠁣󠁬󠁢󠁩󠁿 Flag for Bío Bío (CL-BI) 🏴󠁣󠁬󠁡󠁩󠁿 Flag for Aysén (CL-AI) 🏴󠁣󠁬󠁲󠁭󠁿 Flag for Santiago Metropolitan (CL-RM) 🏴󠁣󠁬󠁴󠁡󠁿 Flag for Tarapacá (CL-TA) 🏴󠁣󠁭󠁳󠁵󠁿 Flag for South (CM-SU) 🏴󠁣󠁬󠁡󠁴󠁿 Flag for Atacama (CL-AT) 🏴󠁣󠁮󠀱󠀲󠁿 Flag for Tianjin (CN-12) 🏴󠁣󠁩󠁬󠁣󠁿 Flag for Lacs (CI-LC) 🏴󠁣󠁬󠁣󠁯󠁿 Flag for Coquimbo (CL-CO) 🏴󠁣󠁬󠁡󠁰󠁿 Flag for Arica y Parinacota (CL-AP) 🏴󠁣󠁭󠁬󠁴󠁿 Flag for Littoral (CM-LT) 🏴󠁣󠁭󠁣󠁥󠁿 Flag for Centre (CM-CE) 🏴󠁣󠁭󠁥󠁮󠁿 Flag for Far North (CM-EN) 🏴󠁣󠁬󠁭󠁡󠁿 Flag for Magallanes Region (CL-MA) 🏴󠁣󠁬󠁭󠁬󠁿 Flag for Maule (CL-ML) 🏴󠁣󠁩󠁭󠁧󠁿 Flag for Montagnes (CI-MG) 🏴󠁣󠁩󠁢󠁳󠁿 Flag for Bas-Sassandra (CI-BS) 🏴󠁣󠁭󠁡󠁤󠁿 Flag for Adamawa (CM-AD) 🏴󠁣󠁬󠁬󠁲󠁿 Flag for Los Ríos (CL-LR) 🏴󠁣󠁭󠁯󠁵󠁿 Flag for West (CM-OU) 🏴󠁣󠁩󠁳󠁶󠁿 Flag for Savanes (CI-SV) 🏴󠁣󠁬󠁬󠁬󠁿 Flag for Los Lagos (CL-LL) 🏴󠁣󠁮󠀳󠀷󠁿 Flag for Shandong (CN-37) 🏴󠁣󠁮󠀶󠀲󠁿 Flag for Gansu (CN-62) 🏴󠁣󠁮󠀳󠀱󠁿 Flag for Shanghai (CN-31) 🏴󠁣󠁮󠀳󠀶󠁿 Flag for Jiangxi (CN-36) 🏴󠁣󠁮󠀷󠀱󠁿 Flag for Taiwan (CN-71) 🏴󠁣󠁯󠁢󠁯󠁹󠁿 Flag for Boyacá (CO-BOY) 🏴󠁣󠁮󠀱󠀱󠁿 Flag for Beijing (CN-11) 🏴󠁢󠁧󠀱󠀸󠁿 Flag for Ruse (BG-18) 🏴󠁣󠁮󠀴󠀴󠁿 Flag for Guangdong (CN-44) 🏴󠁣󠁮󠀶󠀳󠁿 Flag for Qinghai (CN-63) 🏴󠁣󠁮󠀲󠀳󠁿 Flag for Heilongjiang (CN-23) 🏴󠁣󠁮󠀵󠀱󠁿 Flag for Sichuan (CN-51) 🏴󠁣󠁯󠁣󠁡󠁬󠁿 Flag for Caldas (CO-CAL) 🏴󠁣󠁯󠁢󠁯󠁬󠁿 Flag for Bolívar (CO-BOL) 🏴󠁣󠁮󠀵󠀳󠁿 Flag for Yunnan (CN-53) 🏴󠁣󠁯󠁡󠁴󠁬󠁿 Flag for Atlántico (CO-ATL) 🏴󠁣󠁮󠀴󠀲󠁿 Flag for Hubei (CN-42) 🏴󠁣󠁮󠀲󠀲󠁿 Flag for Jilin (CN-22) 🏴󠁣󠁯󠁣󠁡󠁱󠁿 Flag for Caquetá (CO-CAQ) 🏴󠁣󠁮󠀳󠀳󠁿 Flag for Zhejiang (CN-33) 🏴󠁣󠁮󠀱󠀳󠁿 Flag for Hebei (CN-13) 
🏴󠁣󠁮󠀱󠀵󠁿 Flag for Inner Mongolia (CN-15) 🏴󠁣󠁮󠀴󠀳󠁿 Flag for Hunan (CN-43) 🏴󠁣󠁦󠁨󠁫󠁿 Flag for Haute-Kotto (CF-HK) 🏴󠁣󠁮󠀶󠀵󠁿 Flag for Xinjiang (CN-65) 🏴󠁣󠁮󠀵󠀰󠁿 Flag for Chongqing (CN-50) 🏴󠁣󠁮󠀴󠀵󠁿 Flag for Guangxi (CN-45) 🏴󠁣󠁮󠀵󠀴󠁿 Flag for Tibet (CN-54) 🏴󠁣󠁮󠀳󠀲󠁿 Flag for Jiangsu (CN-32) 🏴󠁣󠁯󠁡󠁲󠁡󠁿 Flag for Arauca (CO-ARA) 🏴󠁣󠁮󠀳󠀵󠁿 Flag for Fujian (CN-35) 🏴󠁣󠁮󠀴󠀱󠁿 Flag for Henan (CN-41) 🏴󠁣󠁮󠀴󠀶󠁿 Flag for Hainan (CN-46) 🏴󠁣󠁮󠀱󠀴󠁿 Flag for Shanxi (CN-14) 🏴󠁣󠁯󠁭󠁡󠁧󠁿 Flag for Magdalena (CO-MAG) 🏴󠁣󠁯󠁣󠁨󠁯󠁿 Flag for Chocó (CO-CHO) 🏴󠁣󠁯󠁧󠁵󠁡󠁿 Flag for Guainía (CO-GUA) 🏴󠁣󠁯󠁣󠁯󠁲󠁿 Flag for Córdoba (CO-COR) 🏴󠁣󠁯󠁰󠁵󠁴󠁿 Flag for Putumayo (CO-PUT) 🏴󠁣󠁯󠁳󠁡󠁮󠁿 Flag for Santander (CO-SAN) 🏴󠁣󠁵󠀰󠀵󠁿 Flag for Villa Clara (CU-05) 🏴󠁣󠁯󠁶󠁡󠁣󠁿 Flag for Valle del Cauca (CO-VAC) 🏴󠁣󠁯󠁱󠁵󠁩󠁿 Flag for Quindío (CO-QUI) 🏴󠁣󠁯󠁲󠁩󠁳󠁿 Flag for Risaralda (CO-RIS) 🏴󠁣󠁯󠁣󠁵󠁮󠁿 Flag for Cundinamarca (CO-CUN) 👨🏽‍👶🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁣󠁲󠁡󠁿 Flag for Alajuela (CR-A) 🏴󠁣󠁲󠁰󠁿 Flag for Puntarenas (CR-P) 🏴󠁣󠁯󠁨󠁵󠁩󠁿 Flag for Huila (CO-HUI) 🏴󠁣󠁯󠁶󠁡󠁵󠁿 Flag for Vaupés (CO-VAU) 🏴󠁣󠁯󠁣󠁡󠁵󠁿 Flag for Cauca (CO-CAU) 🏴󠁣󠁵󠀰󠀷󠁿 Flag for Sancti Spíritus (CU-07) 🏴󠁣󠁲󠁬󠁿 Flag for Limón (CR-L) 🏴󠁣󠁯󠁮󠁳󠁡󠁿 Flag for Norte de Santander (CO-NSA) 🏴󠁣󠁵󠀰󠀴󠁿 Flag for Matanzas (CU-04) 🏴󠁣󠁲󠁧󠁿 Flag for Guanacaste (CR-G) 🏴󠁣󠁵󠀰󠀳󠁿 Flag for Havana (CU-03) 👩🏾‍❤️‍💋‍👨 Kiss - Woman: Medium-Dark Skin Tone, Man 🏴󠁣󠁵󠀰󠀸󠁿 Flag for Ciego de Ávila (CU-08) 🏴󠁣󠁯󠁴󠁯󠁬󠁿 Flag for Tolima (CO-TOL) 🏴󠁣󠁵󠀰󠀹󠁿 Flag for Camagüey (CU-09) 🏴󠁣󠁵󠀰󠀶󠁿 Flag for Cienfuegos (CU-06) 🏴󠁣󠁯󠁧󠁵󠁶󠁿 Flag for Guaviare (CO-GUV) 🏴󠁢󠁺󠁣󠁹󠁿 Flag for Cayo (BZ-CY) 🏴󠁥󠁴󠁳󠁮󠁿 Flag for Southern Nations, Nationalities, and Peoples (ET-SN) 🏴󠁣󠁵󠀰󠀱󠁿 Flag for Pinar del Río (CU-01) 🏴󠁣󠁲󠁳󠁪󠁿 Flag for San José (CR-SJ) 🏴󠁣󠁲󠁣󠁿 Flag for Cartago (CR-C) 🏴󠁣󠁯󠁬󠁡󠁧󠁿 Flag for La Guajira (CO-LAG) 🏴󠁣󠁹󠀰󠀲󠁿 Flag for Limassol (CY-02) 🏴󠁤󠁥󠁮󠁩󠁿 Flag for Lower Saxony (DE-NI) 🏴󠁢󠁺󠁯󠁷󠁿 Flag for Orange Walk (BZ-OW) 🏴󠁣󠁺󠀶󠀳󠁿 Flag for Kraj Vysočina (CZ-63) 🏴󠁣󠁺󠀵󠀱󠁿 Flag for Liberecký kraj (CZ-51) 🏴󠁣󠁵󠀱󠀰󠁿 Flag for Las Tunas (CU-10) 🏴󠁣󠁵󠀱󠀳󠁿 Flag for Santiago de Cuba (CU-13) 👨🏾‍👶🏾 Family - Man: Medium-Dark 
Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁣󠁹󠀰󠀱󠁿 Flag for Nicosia (CY-01) 🏴󠁣󠁺󠀲󠀰󠁿 Flag for Středočeský kraj (CZ-20) 🏴󠁣󠁦󠁶󠁫󠁿 Flag for Vakaga (CF-VK) 🏴󠁣󠁺󠀵󠀲󠁿 Flag for Královéhradecký kraj (CZ-52) 🏴󠁣󠁺󠀴󠀱󠁿 Flag for Karlovarský kraj (CZ-41) 🏴󠁣󠁵󠀱󠀵󠁿 Flag for Artemisa (CU-15) 🏴󠁣󠁹󠀰󠀴󠁿 Flag for Famagusta (CY-04) 🏴󠁤󠁥󠁨󠁢󠁿 Flag for Bremen (DE-HB) 🏴󠁤󠁥󠁨󠁥󠁿 Flag for Hesse (DE-HE) 🏴󠁣󠁵󠀱󠀱󠁿 Flag for Holguín (CU-11) 👨🏿‍👶🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁣󠁺󠀸󠀰󠁿 Flag for Moravskoslezský kraj (CZ-80) 🏴󠁣󠁺󠀳󠀱󠁿 Flag for Jihočeský kraj (CZ-31) 🏴󠁣󠁨󠁧󠁬󠁿 Flag for Glarus (CH-GL) 🏴󠁣󠁺󠀱󠀰󠁿 Flag for Praha, Hlavní mešto (CZ-10) 🏴󠁣󠁹󠀰󠀳󠁿 Flag for Larnaca (CY-03) 🏴󠁤󠁥󠁨󠁨󠁿 Flag for Hamburg (DE-HH) 🏴󠁤󠁥󠁭󠁶󠁿 Flag for Mecklenburg-Vorpommern (DE-MV) 🏴󠁣󠁶󠁢󠁿 Flag for Barlavento Islands (CV-B) 🏴󠁣󠁶󠁳󠁿 Flag for Sotavento Islands (CV-S) 🏴󠁣󠁵󠀱󠀶󠁿 Flag for Mayabeque (CU-16) 🏴󠁣󠁺󠀷󠀱󠁿 Flag for Olomoucký kraj (CZ-71) 🏴󠁣󠁵󠀱󠀴󠁿 Flag for Guantánamo (CU-14) 🏴󠁤󠁥󠁢󠁢󠁿 Flag for Brandenburg (DE-BB) 🏴󠁣󠁺󠀳󠀲󠁿 Flag for Plzeňský kraj (CZ-32) 🏴󠁤󠁪󠁡󠁳󠁿 Flag for Ali Sabieh (DJ-AS) 🏴󠁤󠁥󠁲󠁰󠁿 Flag for Rhineland-Palatinate (DE-RP) 🏴󠁤󠁥󠁳󠁮󠁿 Flag for Saxony (DE-SN) 🏴󠁤󠁫󠀸󠀵󠁿 Flag for Zealand (DK-85) 🏴󠁤󠁥󠁳󠁴󠁿 Flag for Saxony-Anhalt (DE-ST) 🏴󠁤󠁺󠀰󠀲󠁿 Flag for Chlef (DZ-02) 🏴󠁤󠁭󠀰󠀷󠁿 Flag for Saint Luke (DM-07) 🏴󠁤󠁪󠁡󠁲󠁿 Flag for Arta (DJ-AR) 🏴󠁤󠁫󠀸󠀴󠁿 Flag for Capital Region (DK-84) 🏴󠁤󠁭󠀱󠀰󠁿 Flag for Saint Paul (DM-10) 🏴󠁤󠁯󠀳󠀶󠁿 Flag for Cibao Sur (DO-36) 🏴󠁤󠁯󠀳󠀸󠁿 Flag for Enriquillo (DO-38) 🏴󠁤󠁭󠀰󠀹󠁿 Flag for Saint Patrick (DM-09) 🏴󠁤󠁯󠀳󠀴󠁿 Flag for Cibao Noroeste (DO-34) 🏴󠁤󠁯󠀳󠀳󠁿 Flag for Cibao Nordeste (DO-33) 🏴󠁤󠁭󠀰󠀵󠁿 Flag for Saint John (DM-05) 🏴󠁤󠁯󠀴󠀲󠁿 Flag for Yuma (DO-42) 🏴󠁤󠁪󠁯󠁢󠁿 Flag for Obock (DJ-OB) 🏴󠁤󠁥󠁴󠁨󠁿 Flag for Thuringia (DE-TH) 🏴󠁤󠁯󠀴󠀰󠁿 Flag for Ozama (DO-40) 🏴󠁤󠁥󠁳󠁬󠁿 Flag for Saarland (DE-SL) 🏴󠁤󠁭󠀰󠀴󠁿 Flag for Saint George (DM-04) 🏴󠁤󠁭󠀰󠀳󠁿 Flag for Saint David (DM-03) 🏴󠁤󠁭󠀰󠀲󠁿 Flag for Saint Andrew (DM-02) 🏴󠁤󠁪󠁤󠁩󠁿 Flag for Dikhil (DJ-DI) 🏴󠁤󠁭󠀰󠀸󠁿 Flag for Saint Mark (DM-08) 🏴󠁤󠁪󠁴󠁡󠁿 Flag for Tadjourah (DJ-TA) 🏴󠁤󠁭󠀱󠀱󠁿 Flag for Saint Peter (DM-11) 🏴󠁤󠁯󠀴󠀱󠁿 Flag for Valdesia (DO-41) 🏴󠁤󠁯󠀳󠀹󠁿 Flag for 
Higüamo (DO-39) 🏴󠁤󠁺󠀰󠀳󠁿 Flag for Laghouat (DZ-03) 🏴󠁤󠁺󠀲󠀸󠁿 Flag for M’Sila (DZ-28) 🏴󠁤󠁺󠀳󠀳󠁿 Flag for Illizi (DZ-33) 👩🏿‍👨🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁤󠁺󠀱󠀵󠁿 Flag for Tizi Ouzou (DZ-15) 🏴󠁤󠁺󠀱󠀴󠁿 Flag for Tiaret (DZ-14) 🏴󠁤󠁺󠀱󠀹󠁿 Flag for Sétif (DZ-19) 🏴󠁤󠁺󠀱󠀷󠁿 Flag for Djelfa (DZ-17) 🏴󠁤󠁺󠀲󠀵󠁿 Flag for Constantine (DZ-25) 🏴󠁤󠁺󠀲󠀴󠁿 Flag for Guelma (DZ-24) 🏴󠁤󠁺󠀴󠀲󠁿 Flag for Tipasa (DZ-42) 🏴󠁤󠁺󠀰󠀵󠁿 Flag for Batna (DZ-05) 🏴󠁤󠁺󠀱󠀲󠁿 Flag for Tébessa (DZ-12) 🏴󠁤󠁺󠀰󠀷󠁿 Flag for Biskra (DZ-07) 🏴󠁤󠁺󠀳󠀰󠁿 Flag for Ouargla (DZ-30) 🏴󠁤󠁺󠀲󠀲󠁿 Flag for Sidi Bel Abbès (DZ-22) 🏴󠁤󠁺󠀱󠀱󠁿 Flag for Tamanghasset (DZ-11) 🏴󠁤󠁺󠀲󠀶󠁿 Flag for Médéa (DZ-26) 🏴󠁤󠁺󠀳󠀲󠁿 Flag for El Bayadh (DZ-32) 🏴󠁤󠁺󠀴󠀰󠁿 Flag for Khenchela (DZ-40) 🏴󠁤󠁺󠀳󠀸󠁿 Flag for Tissemsilt (DZ-38) 🏴󠁤󠁺󠀳󠀹󠁿 Flag for El Oued (DZ-39) 🏴󠁤󠁺󠀴󠀱󠁿 Flag for Souk Ahras (DZ-41) 🏴󠁤󠁺󠀱󠀳󠁿 Flag for Tlemcen (DZ-13) 🏴󠁤󠁺󠀰󠀶󠁿 Flag for Béjaïa (DZ-06) 🏴󠁤󠁺󠀴󠀳󠁿 Flag for Mila (DZ-43) 🏴󠁤󠁺󠀲󠀰󠁿 Flag for Saïda (DZ-20) 🏴󠁤󠁺󠀳󠀱󠁿 Flag for Oran (DZ-31) 🏴󠁤󠁺󠀱󠀰󠁿 Flag for Bouira (DZ-10) 🏴󠁤󠁺󠀳󠀵󠁿 Flag for Boumerdès (DZ-35) 🏴󠁤󠁺󠀳󠀶󠁿 Flag for El Tarf (DZ-36) 🏴󠁤󠁺󠀱󠀶󠁿 Flag for Algiers (DZ-16) 🏴󠁤󠁺󠀳󠀷󠁿 Flag for Tindouf (DZ-37) 🏴󠁤󠁺󠀲󠀳󠁿 Flag for Annaba (DZ-23) 🏴󠁤󠁺󠀰󠀹󠁿 Flag for Blida (DZ-09) 🏴󠁤󠁺󠀰󠀴󠁿 Flag for Oum El Bouaghi (DZ-04) 🏴󠁤󠁺󠀲󠀷󠁿 Flag for Mostaganem (DZ-27) 🏴󠁥󠁣󠁨󠁿 Flag for Chimborazo (EC-H) 🏴󠁤󠁺󠀴󠀷󠁿 Flag for Ghardaïa (DZ-47) 🏴󠁥󠁣󠁢󠁿 Flag for Bolívar (EC-B) 🏴󠁥󠁣󠁣󠁿 Flag for Carchi (EC-C) 🏴󠁤󠁺󠀴󠀴󠁿 Flag for Aïn Defla (DZ-44) 🏴󠁣󠁹󠀰󠀵󠁿 Flag for Paphos (CY-05) 🏴󠁤󠁺󠀴󠀸󠁿 Flag for Relizane (DZ-48) 🏴󠁥󠁣󠁳󠁿 Flag for Morona-Santiago (EC-S) 🏴󠁣󠁨󠁪󠁵󠁿 Flag for Jura (CH-JU) 🏴󠁥󠁣󠁳󠁥󠁿 Flag for Santa Elena (EC-SE) 🏴󠁥󠁥󠀵󠀷󠁿 Flag for Lääne (EE-57) 🏴󠁥󠁣󠁩󠁿 Flag for Imbabura (EC-I) 🏴󠁤󠁺󠀴󠀶󠁿 Flag for Aïn Témouchent (DZ-46) 🏴󠁥󠁣󠁷󠁿 Flag for Galápagos (EC-W) 🏴󠁥󠁣󠁮󠁿 Flag for Napo (EC-N) 👨🏽‍👶🏽‍👦🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁥󠁥󠀶󠀷󠁿 Flag for Pärnu (EE-67) 🏴󠁥󠁥󠀷󠀸󠁿 Flag for Tartu (EE-78) 🏴󠁥󠁣󠁡󠁿 Flag for Azuay (EC-A) 🏴󠁥󠁣󠁭󠁿 Flag for Manabí (EC-M) 🏴󠁥󠁣󠁯󠁿 Flag for El Oro (EC-O) 🏴󠁥󠁣󠁰󠁿 Flag for 
Pichincha (EC-P) 🏴󠁥󠁥󠀷󠀰󠁿 Flag for Rapla (EE-70) 🏴󠁥󠁥󠀷󠀴󠁿 Flag for Saare (EE-74) 👨🏾‍👶🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁥󠁥󠀶󠀵󠁿 Flag for Põlva (EE-65) 🏴󠁥󠁣󠁹󠁿 Flag for Pastaza (EC-Y) 🏴󠁥󠁣󠁧󠁿 Flag for Guayas (EC-G) 🏴󠁥󠁣󠁲󠁿 Flag for Los Ríos (EC-R) 🏴󠁥󠁣󠁵󠁿 Flag for Sucumbíos (EC-U) 🏴󠁥󠁥󠀴󠀹󠁿 Flag for Jõgeva (EE-49) 🏴󠁥󠁥󠀸󠀲󠁿 Flag for Valga (EE-82) 🏴󠁥󠁣󠁬󠁿 Flag for Loja (EC-L) 🏴󠁥󠁣󠁤󠁿 Flag for Orellana (EC-D) 👨🏼‍👶🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁤󠁺󠀴󠀵󠁿 Flag for Naama (DZ-45) 🏴󠁥󠁥󠀵󠀱󠁿 Flag for Järva (EE-51) 🏴󠁥󠁧󠁳󠁩󠁮󠁿 Flag for North Sinai (EG-SIN) 🏴󠁥󠁧󠁪󠁳󠁿 Flag for South Sinai (EG-JS) 🏴󠁥󠁧󠁫󠁮󠁿 Flag for Qena (EG-KN) 🏴󠁥󠁥󠀸󠀴󠁿 Flag for Viljandi (EE-84) 🏴󠁥󠁧󠁩󠁳󠁿 Flag for Ismailia (EG-IS) 🏴󠁥󠁧󠁡󠁳󠁮󠁿 Flag for Aswan (EG-ASN) 🏴󠁥󠁧󠁤󠁫󠁿 Flag for Dakahlia (EG-DK) 🏴󠁥󠁧󠁧󠁨󠁿 Flag for Gharbia (EG-GH) 🏴󠁥󠁧󠁢󠁨󠁿 Flag for Beheira (EG-BH) 🏴󠁥󠁥󠀸󠀶󠁿 Flag for Võru (EE-86) 🏴󠁥󠁧󠁡󠁳󠁴󠁿 Flag for Asyut (EG-AST) 🏴󠁥󠁧󠁫󠁢󠁿 Flag for Qalyubia (EG-KB) 🏴󠁥󠁧󠁧󠁺󠁿 Flag for Giza (EG-GZ) 👨🏿‍👶🏿‍👦🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁥󠁲󠁡󠁮󠁿 Flag for Anseba (ER-AN) 🏴󠁥󠁧󠁫󠁦󠁳󠁿 Flag for Kafr el-Sheikh (EG-KFS) 🏴󠁥󠁧󠁭󠁴󠁿 Flag for Matrouh (EG-MT) 🏴󠁥󠁲󠁧󠁢󠁿 Flag for Gash-Barka (ER-GB) 🏴󠁥󠁧󠁭󠁮󠁿 Flag for Minya (EG-MN) 🏴󠁥󠁧󠁡󠁬󠁸󠁿 Flag for Alexandria (EG-ALX) 🏴󠁥󠁲󠁤󠁫󠁿 Flag for Southern Red Sea (ER-DK) 🏴󠁥󠁧󠁰󠁴󠁳󠁿 Flag for Port Said (EG-PTS) 🏴󠁥󠁧󠁳󠁨󠁧󠁿 Flag for Sohag (EG-SHG) 🏴󠁥󠁧󠁷󠁡󠁤󠁿 Flag for New Valley (EG-WAD) 🏴󠁥󠁲󠁳󠁫󠁿 Flag for Northern Red Sea (ER-SK) 🏴󠁥󠁧󠁳󠁵󠁺󠁿 Flag for Suez (EG-SUZ) 🏴󠁥󠁧󠁭󠁮󠁦󠁿 Flag for Monufia (EG-MNF) 🏴󠁥󠁧󠁬󠁸󠁿 Flag for Luxor (EG-LX) 🏴󠁥󠁲󠁭󠁡󠁿 Flag for Maekel (ER-MA) 🏴󠁥󠁧󠁤󠁴󠁿 Flag for Damietta (EG-DT) 🏴󠁥󠁧󠁳󠁨󠁲󠁿 Flag for Al Sharqia (EG-SHR) 🏴󠁥󠁧󠁦󠁹󠁭󠁿 Flag for Faiyum (EG-FYM) 🏴󠁥󠁲󠁤󠁵󠁿 Flag for Debub (ER-DU) 🏴󠁥󠁳󠁡󠁲󠁿 Flag for Aragon (ES-AR) 🏴󠁣󠁮󠀳󠀴󠁿 Flag for Anhui (CN-34) 🏴󠁤󠁫󠀸󠀱󠁿 Flag for Northern Denmark (DK-81) 👨🏻‍👶🏻‍👧🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👨🏼‍👶🏼‍👧🏼 Family - Man: Medium-Light Skin 
Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👶🏽‍👧🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁥󠁴󠁴󠁩󠁿 Flag for Tigray (ET-TI) 🏴󠁣󠁮󠀲󠀱󠁿 Flag for Liaoning (CN-21) 🏴󠁥󠁴󠁧󠁡󠁿 Flag for Gambela (ET-GA) 🏴󠁥󠁳󠁭󠁬󠁿 Flag for Melilla (ES-ML) 🏴󠁥󠁳󠁭󠁣󠁿 Flag for Murcia Region (ES-MC) 🏴󠁦󠁩󠀱󠀰󠁿 Flag for Lapland (FI-10) 🏴󠁦󠁩󠀰󠀷󠁿 Flag for Central Ostrobothnia (FI-07) 🏴󠁥󠁴󠁡󠁭󠁿 Flag for Amhara (ET-AM) 🏴󠁥󠁴󠁢󠁥󠁿 Flag for Benishangul-Gumuz (ET-BE) 🏴󠁥󠁴󠁯󠁲󠁿 Flag for Oromia (ET-OR) 🏴󠁥󠁳󠁲󠁩󠁿 Flag for La Rioja (ES-RI) 🏴󠁤󠁪󠁤󠁪󠁿 Flag for Djibouti (DJ-DJ) 🏴󠁥󠁳󠁭󠁤󠁿 Flag for Madrid Autonomous Community (ES-MD) 🏴󠁥󠁴󠁤󠁤󠁿 Flag for Dire Dawa (ET-DD) 🏴󠁤󠁺󠀲󠀹󠁿 Flag for Mascara (DZ-29) 🏴󠁦󠁩󠀰󠀵󠁿 Flag for Kainuu (FI-05) 🏴󠁦󠁩󠀰󠀹󠁿 Flag for Kymenlaakso (FI-09) 🏴󠁦󠁩󠀰󠀳󠁿 Flag for Southern Ostrobothnia (FI-03) 🏴󠁦󠁩󠀱󠀱󠁿 Flag for Pirkanmaa (FI-11) 🏴󠁦󠁩󠀰󠀴󠁿 Flag for Southern Savonia (FI-04) 🏴󠁦󠁩󠀱󠀳󠁿 Flag for North Karelia (FI-13) 🏴󠁦󠁩󠀰󠀲󠁿 Flag for South Karelia (FI-02) 🏴󠁥󠁴󠁨󠁡󠁿 Flag for Harari (ET-HA) 🏴󠁣󠁺󠀷󠀲󠁿 Flag for Zlínský kraj (CZ-72) 🏴󠁥󠁴󠁳󠁯󠁿 Flag for Somali (ET-SO) 🏴󠁥󠁳󠁣󠁴󠁿 Flag for Catalonia (ES-CT) 🏴󠁦󠁭󠁫󠁳󠁡󠁿 Flag for Kosrae (FM-KSA) 🏴󠁦󠁲󠁮󠁣󠁿 Flag for New Caledonia (FR-NC) 🏴󠁦󠁲󠁯󠁣󠁣󠁿 Flag for Occitanie (FR-OCC) 🏴󠁦󠁲󠁰󠁡󠁣󠁿 Flag for Provence-Alpes-Côte-d’Azur (FR-PAC) 🏴󠁦󠁩󠀱󠀵󠁿 Flag for Northern Savonia (FI-15) 🏴󠁦󠁭󠁴󠁲󠁫󠁿 Flag for Chuuk (FM-TRK) 🏴󠁦󠁲󠁢󠁦󠁣󠁿 Flag for Bourgogne-Franche-Comté (FR-BFC) 🏴󠁦󠁩󠀱󠀴󠁿 Flag for Northern Ostrobothnia (FI-14) 🏴󠁦󠁪󠁲󠁿 Flag for Rotuma (FJ-R) 🏴󠁦󠁲󠁭󠁡󠁹󠁿 Flag for Mayotte (FR-MAY) 🏴󠁦󠁲󠁮󠁡󠁱󠁿 Flag for Nouvelle-Aquitaine (FR-NAQ) 🏴󠁦󠁪󠁣󠁿 Flag for Central (FJ-C) 🏴󠁦󠁲󠁧󠁥󠁳󠁿 Flag for Grand-Est (FR-GES) 🏴󠁦󠁪󠁮󠁿 Flag for Northern (FJ-N) 🏴󠁦󠁲󠁧󠁵󠁡󠁿 Flag for Guadeloupe (FR-GUA) 🏴󠁦󠁭󠁹󠁡󠁰󠁿 Flag for Yap (FM-YAP) 🏴󠁦󠁲󠁢󠁲󠁥󠁿 Flag for Bretagne (FR-BRE) 🏴󠁦󠁲󠁰󠁦󠁿 Flag for French Polynesia (FR-PF) 🏴󠁦󠁲󠁮󠁯󠁲󠁿 Flag for Normandie (FR-NOR) 🏴󠁦󠁲󠁧󠁦󠁿 Flag for French Guiana (FR-GF) 🏴󠁦󠁲󠁣󠁶󠁬󠁿 Flag for Centre-Val de Loire (FR-CVL) 🏴󠁦󠁲󠁣󠁰󠁿 Flag for Clipperton Island (FR-CP) 🏴󠁦󠁲󠁭󠁦󠁿 Flag for St. 
Martin (FR-MF) 🏴󠁦󠁩󠀱󠀶󠁿 Flag for Päijänne Tavastia (FI-16) 🏴󠁦󠁩󠀱󠀹󠁿 Flag for Southwest Finland (FI-19) 🏴󠁦󠁲󠁬󠁲󠁥󠁿 Flag for La Réunion (FR-LRE) 🏴󠁦󠁩󠀱󠀷󠁿 Flag for Satakunta (FI-17) 🏴󠁧󠁥󠁳󠁫󠁿 Flag for Shida Kartli (GE-SK) 🏴󠁧󠁡󠀳󠁿 Flag for Moyen-Ogooué (GA-3) 👨🏿‍👶🏿‍👧🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁧󠁤󠀰󠀳󠁿 Flag for Saint George (GD-03) 🏴󠁧󠁡󠀵󠁿 Flag for Nyanga (GA-5) 🏴󠁧󠁡󠀶󠁿 Flag for Ogooué-Ivindo (GA-6) 🏴󠁧󠁨󠁢󠁡󠁿 Flag for Brong-Ahafo (GH-BA) 🏴󠁧󠁡󠀲󠁿 Flag for Haut-Ogooué (GA-2) 🏴󠁧󠁤󠀰󠀱󠁿 Flag for Saint Andrew (GD-01) 🏴󠁧󠁤󠀰󠀶󠁿 Flag for Saint Patrick (GD-06) 🏴󠁥󠁳󠁧󠁡󠁿 Flag for Galicia (ES-GA) 🏴󠁦󠁲󠁷󠁦󠁿 Flag for Wallis & Futuna (FR-WF) 👨🏻‍👶🏻‍👶🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 🏴󠁦󠁲󠁰󠁭󠁿 Flag for St. Pierre & Miquelon (FR-PM) 🏴󠁧󠁤󠀰󠀴󠁿 Flag for Saint John (GD-04) 🏴󠁧󠁥󠁴󠁢󠁿 Flag for Tbilisi (GE-TB) 👨🏼‍👶🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁧󠁤󠀰󠀲󠁿 Flag for Saint David (GD-02) 🏴󠁧󠁥󠁧󠁵󠁿 Flag for Guria (GE-GU) 🏴󠁧󠁡󠀹󠁿 Flag for Woleu-Ntem (GA-9) 🏴󠁧󠁥󠁲󠁬󠁿 Flag for Racha-Lechkhumi and Kvemo Svaneti (GE-RL) 🏴󠁧󠁥󠁳󠁪󠁿 Flag for Samtskhe-Javakheti (GE-SJ) 🏴󠁧󠁥󠁭󠁭󠁿 Flag for Mtskheta-Mtianeti (GE-MM) 🏴󠁧󠁥󠁩󠁭󠁿 Flag for Imereti (GE-IM) 🏴󠁧󠁡󠀸󠁿 Flag for Ogooué-Maritime (GA-8) 🏴󠁣󠁮󠀶󠀱󠁿 Flag for Shaanxi (CN-61) 🏴󠁧󠁨󠁡󠁡󠁿 Flag for Greater Accra (GH-AA) 🏴󠁣󠁺󠀶󠀴󠁿 Flag for Jihomoravský kraj (CZ-64) 🏴󠁧󠁥󠁡󠁪󠁿 Flag for Adjara (GE-AJ) 🏴󠁧󠁥󠁳󠁺󠁿 Flag for Samegrelo-Zemo Svaneti (GE-SZ) 🏴󠁧󠁡󠀱󠁿 Flag for Estuaire (GA-1) 🏴󠁧󠁡󠀷󠁿 Flag for Ogooué-Lolo (GA-7) 🏴󠁧󠁮󠁤󠁿 Flag for Kindia Region (GN-D) 🏴󠁧󠁮󠁭󠁿 Flag for Mamou Region (GN-M) 👨🏽‍👶🏽‍👶🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁧󠁬󠁱󠁡󠁿 Flag for Qaasuitsup (GL-QA) 🏴󠁧󠁭󠁮󠁿 Flag for North Bank Division (GM-N) 🏴󠁧󠁬󠁳󠁭󠁿 Flag for Sermersooq (GL-SM) 🏴󠁧󠁨󠁮󠁰󠁿 Flag for Northern (GH-NP) 🏴󠁧󠁲󠁦󠁿 Flag for Ionian Islands (GR-F) 🏴󠁧󠁲󠁨󠁿 Flag for Central Greece (GR-H) 🏴󠁧󠁨󠁣󠁰󠁿 Flag for Central (GH-CP) 🏴󠁧󠁮󠁫󠁿 Flag for Kankan Region (GN-K) 🏴󠁧󠁲󠁬󠁿 Flag for South Aegean (GR-L) 
🏴󠁧󠁲󠁩󠁿 Flag for Attica (GR-I) 🏴󠁧󠁭󠁵󠁿 Flag for Upper River Division (GM-U) 🏴󠁧󠁨󠁥󠁰󠁿 Flag for Eastern (GH-EP) 🏴󠁧󠁮󠁮󠁿 Flag for Nzérékoré Region (GN-N) 🏴󠁧󠁨󠁷󠁰󠁿 Flag for Western (GH-WP) 🏴󠁧󠁲󠁣󠁿 Flag for West Macedonia (GR-C) 🏴󠁧󠁱󠁣󠁿 Flag for Río Muni (GQ-C) 🏴󠁧󠁭󠁬󠁿 Flag for Lower River Division (GM-L) 🏴󠁧󠁨󠁵󠁥󠁿 Flag for Upper East (GH-UE) 🏴󠁧󠁮󠁣󠁿 Flag for Conakry (GN-C) 🏴󠁧󠁲󠁢󠁿 Flag for Central Macedonia (GR-B) 🏴󠁧󠁭󠁭󠁿 Flag for Central River Division (GM-M) 🏴󠁧󠁨󠁵󠁷󠁿 Flag for Upper West (GH-UW) 🏴󠁧󠁬󠁫󠁵󠁿 Flag for Kujalleq (GL-KU) 🏴󠁧󠁮󠁢󠁿 Flag for Boké Region (GN-B) 🏴󠁧󠁬󠁱󠁥󠁿 Flag for Qeqqata (GL-QE) 🏴󠁧󠁲󠁤󠁿 Flag for Epirus (GR-D) 🏴󠁧󠁨󠁡󠁨󠁿 Flag for Ashanti (GH-AH) 🏴󠁧󠁨󠁴󠁶󠁿 Flag for Volta (GH-TV) 🏴󠁧󠁲󠀶󠀹󠁿 Flag for Mount Athos (GR-69) 🏴󠁧󠁱󠁩󠁿 Flag for Insular (GQ-I) 🏴󠁧󠁭󠁷󠁿 Flag for West Coast Division (GM-W) 🏴󠁧󠁭󠁢󠁿 Flag for Banjul (GM-B) 🏴󠁧󠁮󠁬󠁿 Flag for Labé Region (GN-L) 🏴󠁧󠁲󠁥󠁿 Flag for Thessaly (GR-E) 🏴󠁧󠁮󠁦󠁿 Flag for Faranah Region (GN-F) 🏴󠁧󠁹󠁣󠁵󠁿 Flag for Cuyuni-Mazaruni (GY-CU) 🏴󠁨󠁮󠁡󠁴󠁿 Flag for Atlántida (HN-AT) 👨🏾‍👶🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁧󠁴󠁨󠁵󠁿 Flag for Huehuetenango (GT-HU) 🏴󠁧󠁴󠁡󠁶󠁿 Flag for Alta Verapaz (GT-AV) 🏴󠁧󠁴󠁰󠁲󠁿 Flag for El Progreso (GT-PR) 🏴󠁧󠁷󠁮󠁿 Flag for Norte (GW-N) 🏴󠁧󠁴󠁳󠁵󠁿 Flag for Suchitepéquez (GT-SU) 🏴󠁧󠁹󠁰󠁭󠁿 Flag for Pomeroon-Supenaam (GY-PM) 🏴󠁧󠁴󠁩󠁺󠁿 Flag for Izabal (GT-IZ) 🏴󠁧󠁹󠁰󠁴󠁿 Flag for Potaro-Siparuni (GY-PT) 🏴󠁧󠁴󠁱󠁺󠁿 Flag for Quetzaltenango (GT-QZ) 🏴󠁧󠁴󠁣󠁭󠁿 Flag for Chimaltenango (GT-CM) 🏴󠁥󠁴󠁡󠁡󠁿 Flag for Addis Ababa (ET-AA) 🏴󠁧󠁷󠁢󠁳󠁿 Flag for Bissau (GW-BS) 🏴󠁧󠁴󠁱󠁣󠁿 Flag for Quiché (GT-QC) 🏴󠁧󠁴󠁴󠁯󠁿 Flag for Totonicapán (GT-TO) 🏴󠁧󠁹󠁢󠁡󠁿 Flag for Barima-Waini (GY-BA) 🏴󠁧󠁹󠁥󠁳󠁿 Flag for Essequibo Islands-West Demerara (GY-ES) 👨🏿‍👶🏿‍👶🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁨󠁮󠁣󠁨󠁿 Flag for Choluteca (HN-CH) 🏴󠁧󠁹󠁤󠁥󠁿 Flag for Demerara-Mahaica (GY-DE) 👨🏻‍👨🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone 🏴󠁧󠁴󠁳󠁡󠁿 Flag for Sacatepéquez (GT-SA) 🏴󠁧󠁴󠁪󠁵󠁿 Flag for Jutiapa (GT-JU) 🏴󠁧󠁴󠁣󠁱󠁿 Flag for 
Chiquimula (GT-CQ) 🏴󠁧󠁴󠁢󠁶󠁿 Flag for Baja Verapaz (GT-BV) 🏴󠁧󠁴󠁥󠁳󠁿 Flag for Escuintla (GT-ES) 🏴󠁧󠁴󠁺󠁡󠁿 Flag for Zacapa (GT-ZA) 🏴󠁧󠁷󠁳󠁿 Flag for Sul (GW-S) 🏴󠁧󠁷󠁬󠁿 Flag for Leste (GW-L) 🏴󠁧󠁴󠁪󠁡󠁿 Flag for Jalapa (GT-JA) 🏴󠁧󠁴󠁰󠁥󠁿 Flag for Petén (GT-PE) 🏴󠁧󠁴󠁳󠁯󠁿 Flag for Sololá (GT-SO) 🏴󠁨󠁮󠁣󠁭󠁿 Flag for Comayagua (HN-CM) 🏴󠁨󠁲󠀰󠀶󠁿 Flag for Koprivnica-Križevci (HR-06) 🏴󠁨󠁮󠁣󠁰󠁿 Flag for Copán (HN-CP) 🏴󠁨󠁮󠁩󠁢󠁿 Flag for Bay Islands (HN-IB) 🏴󠁨󠁲󠀰󠀹󠁿 Flag for Lika-Senj (HR-09) 🏴󠁨󠁮󠁳󠁢󠁿 Flag for Santa Bárbara (HN-SB) 🏴󠁨󠁮󠁩󠁮󠁿 Flag for Intibucá (HN-IN) 🏴󠁨󠁮󠁦󠁭󠁿 Flag for Francisco Morazán (HN-FM) 🏴󠁨󠁲󠀰󠀱󠁿 Flag for Zagreb County (HR-01) 🏴󠁨󠁮󠁣󠁬󠁿 Flag for Colón (HN-CL) 🏴󠁨󠁴󠁣󠁥󠁿 Flag for Centre (HT-CE) 🏴󠁨󠁲󠀰󠀸󠁿 Flag for Primorje-Gorski Kotar (HR-08) 🏴󠁨󠁮󠁬󠁥󠁿 Flag for Lempira (HN-LE) 🏴󠁨󠁲󠀱󠀴󠁿 Flag for Osijek-Baranja (HR-14) 🏴󠁨󠁲󠀱󠀲󠁿 Flag for Brod-Posavina (HR-12) 🏴󠁨󠁲󠀱󠀷󠁿 Flag for Split-Dalmatia (HR-17) 🏴󠁨󠁮󠁯󠁬󠁿 Flag for Olancho (HN-OL) 🏴󠁨󠁮󠁬󠁰󠁿 Flag for La Paz (HN-LP) 🏴󠁨󠁲󠀲󠀰󠁿 Flag for Međimurje (HR-20) 🏴󠁨󠁮󠁥󠁰󠁿 Flag for El Paraíso (HN-EP) 🏴󠁨󠁲󠀲󠀱󠁿 Flag for Zagreb (HR-21) 🏴󠁨󠁲󠀱󠀵󠁿 Flag for Šibenik-Knin (HR-15) 🏴󠁥󠁥󠀴󠀴󠁿 Flag for Ida-Viru (EE-44) 🏴󠁨󠁮󠁣󠁲󠁿 Flag for Cortés (HN-CR) 🏴󠁨󠁲󠀰󠀳󠁿 Flag for Sisak-Moslavina (HR-03) 🏴󠁨󠁲󠀱󠀳󠁿 Flag for Zadar (HR-13) 🏴󠁨󠁲󠀱󠀸󠁿 Flag for Istria (HR-18) 🏴󠁨󠁲󠀰󠀲󠁿 Flag for Krapina-Zagorje (HR-02) 🏴󠁨󠁲󠀱󠀶󠁿 Flag for Vukovar-Syrmia (HR-16) 🏴󠁨󠁮󠁹󠁯󠁿 Flag for Yoro (HN-YO) 🏴󠁨󠁴󠁡󠁲󠁿 Flag for Artibonite (HT-AR) 🏴󠁨󠁮󠁧󠁤󠁿 Flag for Gracias a Dios (HN-GD) 🏴󠁨󠁮󠁶󠁡󠁿 Flag for Valle (HN-VA) 🏴󠁤󠁺󠀱󠀸󠁿 Flag for Jijel (DZ-18) 🏴󠁨󠁲󠀱󠀹󠁿 Flag for Dubrovnik-Neretva (HR-19) 🏴󠁨󠁲󠀱󠀱󠁿 Flag for Požega-Slavonia (HR-11) 🏴󠁨󠁲󠀰󠀷󠁿 Flag for Bjelovar-Bilogora (HR-07) 🏴󠁨󠁮󠁯󠁣󠁿 Flag for Ocotepeque (HN-OC) 🏴󠁨󠁵󠁢󠁵󠁿 Flag for Budapest (HU-BU) 🏴󠁨󠁵󠁨󠁶󠁿 Flag for Hódmezővásárhely (HU-HV) 🏴󠁨󠁵󠁦󠁥󠁿 Flag for Fejér (HU-FE) 🏴󠁨󠁵󠁢󠁡󠁿 Flag for Baranya (HU-BA) 🏴󠁨󠁵󠁳󠁦󠁿 Flag for Székesfehérvár (HU-SF) 🏴󠁨󠁵󠁢󠁺󠁿 Flag for Borsod-Abaúj-Zemplén (HU-BZ) 🏴󠁨󠁵󠁣󠁳󠁿 Flag for Csongrád (HU-CS) 🏴󠁨󠁵󠁳󠁮󠁿 Flag for Sopron (HU-SN) 🏴󠁨󠁵󠁤󠁵󠁿 Flag for Dunaújváros (HU-DU) 🏴󠁨󠁵󠁫󠁶󠁿 Flag for Kaposvár (HU-KV) 🏴󠁨󠁵󠁮󠁹󠁿 Flag for Nyíregyháza 
(HU-NY) 🏴󠁨󠁵󠁨󠁢󠁿 Flag for Hajdú-Bihar (HU-HB) 🏴󠁨󠁴󠁯󠁵󠁿 Flag for Ouest (HT-OU) 🏴󠁨󠁵󠁳󠁤󠁿 Flag for Szeged (HU-SD) 🏴󠁨󠁵󠁰󠁥󠁿 Flag for Pest (HU-PE) 🏴󠁨󠁵󠁫󠁥󠁿 Flag for Komárom-Esztergom (HU-KE) 🏴󠁨󠁵󠁮󠁫󠁿 Flag for Nagykanizsa (HU-NK) 🏴󠁨󠁴󠁧󠁡󠁿 Flag for Grand’Anse (HT-GA) 🏴󠁨󠁵󠁢󠁣󠁿 Flag for Békéscsaba (HU-BC) 🏴󠁨󠁴󠁳󠁤󠁿 Flag for Sud (HT-SD) 🏴󠁨󠁴󠁮󠁯󠁿 Flag for Nord-Ouest (HT-NO) 🏴󠁨󠁵󠁨󠁥󠁿 Flag for Heves (HU-HE) 🏴󠁨󠁵󠁢󠁫󠁿 Flag for Bács-Kiskun (HU-BK) 🏴󠁨󠁵󠁭󠁩󠁿 Flag for Miskolc (HU-MI) 🏴󠁨󠁵󠁥󠁲󠁿 Flag for Érd (HU-ER) 👨🏽‍👨🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁨󠁴󠁮󠁩󠁿 Flag for Nippes (HT-NI) 🏴󠁨󠁵󠁳󠁫󠁿 Flag for Szolnok (HU-SK) 🏴󠁨󠁴󠁮󠁤󠁿 Flag for Nord (HT-ND) 🏴󠁨󠁴󠁳󠁥󠁿 Flag for Sud-Est (HT-SE) 🏴󠁨󠁵󠁪󠁮󠁿 Flag for Jász-Nagykun-Szolnok (HU-JN) 🏴󠁨󠁵󠁰󠁳󠁿 Flag for Pécs (HU-PS) 🏴󠁨󠁵󠁫󠁭󠁿 Flag for Kecskemét (HU-KM) 🏴󠁨󠁵󠁤󠁥󠁿 Flag for Debrecen (HU-DE) 🏴󠁨󠁵󠁢󠁥󠁿 Flag for Békés (HU-BE) 🏴󠁨󠁵󠁮󠁯󠁿 Flag for Nógrád (HU-NO) 🏴󠁨󠁵󠁳󠁨󠁿 Flag for Szombathely (HU-SH) 🏴󠁨󠁵󠁧󠁹󠁿 Flag for Győr (HU-GY) 🏴󠁩󠁤󠁮󠁵󠁿 Flag for Lesser Sunda Islands (ID-NU) 🏴󠁨󠁵󠁴󠁢󠁿 Flag for Tatabánya (HU-TB) 🏴󠁩󠁤󠁪󠁷󠁿 Flag for Java (ID-JW) 🏴󠁩󠁮󠁣󠁨󠁿 Flag for Chandigarh (IN-CH) 🏴󠁩󠁮󠁧󠁪󠁿 Flag for Gujarat (IN-GJ) 🏴󠁩󠁥󠁬󠁿 Flag for Leinster (IE-L) 🏴󠁨󠁵󠁺󠁡󠁿 Flag for Zala (HU-ZA) 🏴󠁩󠁮󠁤󠁤󠁿 Flag for Daman and Diu (IN-DD) 🏴󠁩󠁬󠁴󠁡󠁿 Flag for Tel Aviv District (IL-TA) 🏴󠁩󠁤󠁳󠁬󠁿 Flag for Sulawesi (ID-SL) 🏴󠁩󠁮󠁡󠁲󠁿 Flag for Arunachal Pradesh (IN-AR) 🏴󠁨󠁵󠁶󠁥󠁿 Flag for Veszprém County (HU-VE) 🏴󠁩󠁮󠁡󠁮󠁿 Flag for Andaman and Nicobar Islands (IN-AN) 🏴󠁨󠁵󠁳󠁯󠁿 Flag for Somogy (HU-SO) 🏴󠁨󠁵󠁶󠁡󠁿 Flag for Vas (HU-VA) 🏴󠁩󠁬󠁪󠁭󠁿 Flag for Jerusalem (IL-JM) 🏴󠁩󠁮󠁤󠁮󠁿 Flag for Dadra and Nagar Haveli (IN-DN) 🏴󠁨󠁵󠁶󠁭󠁿 Flag for Veszprém (HU-VM) 🏴󠁨󠁵󠁳󠁴󠁿 Flag for Salgótarján (HU-ST) 🏴󠁩󠁮󠁣󠁴󠁿 Flag for Chhattisgarh (IN-CT) 🏴󠁩󠁥󠁵󠁿 Flag for Ulster (IE-U) 🏴󠁩󠁮󠁤󠁬󠁿 Flag for Delhi (IN-DL) 🏴󠁩󠁥󠁭󠁿 Flag for Munster (IE-M) 🏴󠁩󠁥󠁣󠁿 Flag for Connacht (IE-C) 🏴󠁩󠁬󠁨󠁡󠁿 Flag for Haifa District (IL-HA) 🏴󠁩󠁤󠁫󠁡󠁿 Flag for Kalimantan (ID-KA) 🏴󠁩󠁮󠁧󠁡󠁿 Flag for Goa (IN-GA) 🏴󠁩󠁤󠁳󠁭󠁿 Flag for Sumatra (ID-SM) 🏴󠁩󠁤󠁰󠁰󠁿 Flag for Papua Islands (ID-PP) 🏴󠁨󠁵󠁳󠁳󠁿 Flag for Szekszárd 
(HU-SS) 🏴󠁩󠁬󠁺󠁿 Flag for Northern District (IL-Z) 🏴󠁨󠁵󠁴󠁯󠁿 Flag for Tolna (HU-TO) 🏴󠁩󠁬󠁭󠁿 Flag for Central District (IL-M) 🏴󠁩󠁬󠁤󠁿 Flag for Southern District (IL-D) 🏴󠁩󠁮󠁢󠁲󠁿 Flag for Bihar (IN-BR) 🏴󠁨󠁵󠁺󠁥󠁿 Flag for Zalaegerszeg (HU-ZE) 🏴󠁩󠁮󠁡󠁰󠁿 Flag for Andhra Pradesh (IN-AP) 🏴󠁩󠁱󠁤󠁡󠁿 Flag for Dohuk (IQ-DA) 🏴󠁩󠁮󠁪󠁨󠁿 Flag for Jharkhand (IN-JH) 🏴󠁩󠁮󠁫󠁬󠁿 Flag for Kerala (IN-KL) 🏴󠁩󠁮󠁷󠁢󠁿 Flag for West Bengal (IN-WB) 🏴󠁩󠁮󠁯󠁲󠁿 Flag for Odisha (IN-OR) 🏴󠁩󠁮󠁰󠁹󠁿 Flag for Puducherry (IN-PY) 🏴󠁩󠁱󠁫󠁡󠁿 Flag for Karbala (IQ-KA) 🏴󠁩󠁱󠁳󠁤󠁿 Flag for Saladin (IQ-SD) 🏴󠁩󠁮󠁭󠁺󠁿 Flag for Mizoram (IN-MZ) 🏴󠁩󠁮󠁨󠁰󠁿 Flag for Himachal Pradesh (IN-HP) 🏴󠁩󠁮󠁭󠁰󠁿 Flag for Madhya Pradesh (IN-MP) 🏴󠁩󠁮󠁰󠁢󠁿 Flag for Punjab (IN-PB) 🏴󠁩󠁮󠁮󠁬󠁿 Flag for Nagaland (IN-NL) 🏴󠁩󠁱󠁱󠁡󠁿 Flag for Al-Qādisiyyah (IQ-QA) 🏴󠁩󠁱󠁤󠁩󠁿 Flag for Diyala (IQ-DI) 🏴󠁩󠁱󠁮󠁩󠁿 Flag for Nineveh (IQ-NI) 🏴󠁩󠁱󠁤󠁱󠁿 Flag for Dhi Qar (IQ-DQ) 🏴󠁩󠁮󠁭󠁬󠁿 Flag for Meghalaya (IN-ML) 🏴󠁩󠁮󠁴󠁮󠁿 Flag for Tamil Nadu (IN-TN) 🏴󠁩󠁱󠁮󠁡󠁿 Flag for Najaf (IQ-NA) 🏴󠁩󠁱󠁭󠁵󠁿 Flag for Al Muthanna (IQ-MU) 🏴󠁩󠁮󠁴󠁧󠁿 Flag for Telangana (IN-TG) 🏴󠁩󠁮󠁨󠁲󠁿 Flag for Haryana (IN-HR) 🏴󠁩󠁮󠁵󠁴󠁿 Flag for Uttarakhand (IN-UT) 🏴󠁩󠁮󠁴󠁲󠁿 Flag for Tripura (IN-TR) 🏴󠁩󠁱󠁢󠁧󠁿 Flag for Baghdad (IQ-BG) 🏴󠁩󠁮󠁬󠁤󠁿 Flag for Lakshadweep (IN-LD) 🏴󠁩󠁱󠁭󠁡󠁿 Flag for Maysan (IQ-MA) 🏴󠁩󠁱󠁢󠁡󠁿 Flag for Basra (IQ-BA) 🏴󠁩󠁱󠁡󠁲󠁿 Flag for Erbil (IQ-AR) 🏴󠁩󠁮󠁭󠁨󠁿 Flag for Maharashtra (IN-MH) 🏴󠁩󠁱󠁡󠁮󠁿 Flag for Al Anbar (IQ-AN) 🏴󠁩󠁮󠁳󠁫󠁿 Flag for Sikkim (IN-SK) 🏴󠁩󠁱󠁢󠁢󠁿 Flag for Babylon (IQ-BB) 🏴󠁩󠁮󠁵󠁰󠁿 Flag for Uttar Pradesh (IN-UP) 🏴󠁩󠁱󠁳󠁵󠁿 Flag for Sulaymaniyah (IQ-SU) 🏴󠁩󠁮󠁲󠁪󠁿 Flag for Rajasthan (IN-RJ) 🏴󠁩󠁮󠁪󠁫󠁿 Flag for Jammu and Kashmir (IN-JK) 🏴󠁩󠁲󠀰󠀸󠁿 Flag for Chaharmahal and Bakhtiari (IR-08) 🏴󠁩󠁲󠀲󠀶󠁿 Flag for Qom (IR-26) 🏴󠁩󠁳󠀱󠁿 Flag for Capital (IS-1) 👨🏾‍👨🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁩󠁲󠀰󠀳󠁿 Flag for Ardabil (IR-03) 🏴󠁩󠁲󠀲󠀵󠁿 Flag for Yazd (IR-25) 🏴󠁩󠁲󠀲󠀹󠁿 Flag for South Khorasan (IR-29) 👨🏿‍👨🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁩󠁲󠀲󠀴󠁿 Flag for Hamadan (IR-24) 🏴󠁧󠁹󠁭󠁡󠁿 Flag for Mahaica-Berbice 
(GY-MA) 🏴󠁩󠁳󠀳󠁿 Flag for Western (IS-3) 🏴󠁩󠁲󠀲󠀷󠁿 Flag for Golestan (IR-27) 🏴󠁩󠁲󠀱󠀱󠁿 Flag for Zanjan (IR-11) 🏴󠁩󠁲󠀲󠀰󠁿 Flag for Lorestan (IR-20) 🏴󠁩󠁲󠀱󠀷󠁿 Flag for Kermanshah (IR-17) 🏴󠁩󠁲󠀱󠀸󠁿 Flag for Kohgiluyeh and Boyer-Ahmad (IR-18) 🏴󠁥󠁧󠁣󠁿 Flag for Cairo (EG-C) 🏴󠁩󠁲󠀳󠀱󠁿 Flag for North Khorasan (IR-31) 🏴󠁩󠁲󠀰󠀶󠁿 Flag for Bushehr (IR-06) 🏴󠁥󠁳󠁥󠁸󠁿 Flag for Extremadura (ES-EX) 🏴󠁥󠁳󠁣󠁮󠁿 Flag for Canary Islands (ES-CN) 🏴󠁩󠁳󠀷󠁿 Flag for Eastern (IS-7) 🏴󠁩󠁲󠀰󠀵󠁿 Flag for Ilam (IR-05) 🏴󠁩󠁲󠀲󠀸󠁿 Flag for Qazvin (IR-28) 🏴󠁩󠁲󠀰󠀴󠁿 Flag for Isfahan (IR-04) 🏴󠁩󠁲󠀱󠀵󠁿 Flag for Kerman (IR-15) 🏴󠁩󠁲󠀲󠀳󠁿 Flag for Hormozgan (IR-23) 🏴󠁩󠁱󠁷󠁡󠁿 Flag for Wasit (IQ-WA) 🏴󠁩󠁴󠀲󠀱󠁿 Flag for Piedmont (IT-21) 🏴󠁩󠁳󠀶󠁿 Flag for Northeastern (IS-6) 🏴󠁩󠁳󠀵󠁿 Flag for Northwestern (IS-5) 🏴󠁩󠁲󠀲󠀲󠁿 Flag for Markazi (IR-22) 🏴󠁩󠁲󠀱󠀹󠁿 Flag for Gilan (IR-19) 🏴󠁩󠁲󠀱󠀰󠁿 Flag for Khuzestan (IR-10) 🏴󠁩󠁲󠀱󠀲󠁿 Flag for Semnan (IR-12) 🏴󠁩󠁳󠀲󠁿 Flag for Southern Peninsula (IS-2) 🏴󠁪󠁭󠀱󠀲󠁿 Flag for Manchester (JM-12) 🏴󠁪󠁯󠁩󠁲󠁿 Flag for Irbid (JO-IR) 🏴󠁪󠁭󠀰󠀵󠁿 Flag for Saint Mary (JM-05) 🏴󠁩󠁴󠀷󠀷󠁿 Flag for Basilicata (IT-77) 🏴󠁩󠁴󠀳󠀶󠁿 Flag for Friuli–Venezia Giulia (IT-36) 🏴󠁪󠁭󠀱󠀳󠁿 Flag for Clarendon (JM-13) 🏴󠁩󠁴󠀵󠀷󠁿 Flag for Marche (IT-57) 🏴󠁪󠁭󠀰󠀴󠁿 Flag for Portland (JM-04) 🏴󠁩󠁴󠀸󠀲󠁿 Flag for Sicily (IT-82) 🏴󠁩󠁴󠀳󠀴󠁿 Flag for Veneto (IT-34) 🏴󠁩󠁴󠀶󠀵󠁿 Flag for Abruzzo (IT-65) 🏴󠁩󠁴󠀶󠀷󠁿 Flag for Molise (IT-67) 🏴󠁪󠁯󠁢󠁡󠁿 Flag for Balqa (JO-BA) 🏴󠁩󠁴󠀷󠀵󠁿 Flag for Apulia (IT-75) 🏴󠁩󠁴󠀷󠀸󠁿 Flag for Calabria (IT-78) 🏴󠁩󠁴󠀵󠀲󠁿 Flag for Tuscany (IT-52) 🏴󠁪󠁭󠀰󠀹󠁿 Flag for Hanover (JM-09) 🏴󠁪󠁭󠀰󠀲󠁿 Flag for Saint Andrew (JM-02) 🏴󠁪󠁯󠁡󠁴󠁿 Flag for Tafilah (JO-AT) 🏴󠁩󠁴󠀵󠀵󠁿 Flag for Umbria (IT-55) 🏴󠁪󠁭󠀰󠀸󠁿 Flag for Saint James (JM-08) 🏴󠁪󠁭󠀰󠀶󠁿 Flag for Saint Ann (JM-06) 🏴󠁪󠁭󠀱󠀱󠁿 Flag for Saint Elizabeth (JM-11) 🏴󠁪󠁯󠁡󠁺󠁿 Flag for Zarqa (JO-AZ) 🏴󠁦󠁩󠀱󠀲󠁿 Flag for Ostrobothnia (FI-12) 🏴󠁩󠁴󠀶󠀲󠁿 Flag for Lazio (IT-62) 🏴󠁪󠁯󠁡󠁪󠁿 Flag for Ajloun (JO-AJ) 🏴󠁩󠁴󠀴󠀲󠁿 Flag for Liguria (IT-42) 🏴󠁪󠁭󠀰󠀷󠁿 Flag for Trelawny (JM-07) 🏴󠁪󠁯󠁡󠁱󠁿 Flag for Aqaba (JO-AQ) 🏴󠁪󠁯󠁪󠁡󠁿 Flag for Jerash (JO-JA) 🏴󠁪󠁯󠁡󠁭󠁿 Flag for Amman (JO-AM) 🏴󠁩󠁴󠀲󠀳󠁿 Flag for Aosta Valley (IT-23) 🏴󠁪󠁭󠀱󠀰󠁿 Flag for 
Westmoreland (JM-10) 🏴󠁪󠁰󠀰󠀸󠁿 Flag for Ibaraki (JP-08) 🏴󠁪󠁯󠁭󠁤󠁿 Flag for Madaba (JO-MD) 🏴󠁪󠁰󠀳󠀲󠁿 Flag for Shimane (JP-32) 🏴󠁪󠁰󠀲󠀶󠁿 Flag for Kyōto (JP-26) 🏴󠁣󠁬󠁡󠁲󠁿 Flag for Araucanía (CL-AR) 🏴󠁪󠁰󠀰󠀹󠁿 Flag for Tochigi (JP-09) 🏴󠁪󠁰󠀰󠀵󠁿 Flag for Akita (JP-05) 🏴󠁪󠁰󠀱󠀲󠁿 Flag for Chiba (JP-12) 🏴󠁪󠁰󠀰󠀴󠁿 Flag for Miyagi (JP-04) 🏴󠁪󠁰󠀱󠀵󠁿 Flag for Niigata (JP-15) 🏴󠁪󠁰󠀱󠀶󠁿 Flag for Toyama (JP-16) 🏴󠁪󠁰󠀲󠀳󠁿 Flag for Aichi (JP-23) 🏴󠁪󠁰󠀳󠀶󠁿 Flag for Tokushima (JP-36) 🏴󠁪󠁰󠀲󠀰󠁿 Flag for Nagano (JP-20) 🏴󠁪󠁰󠀳󠀱󠁿 Flag for Tottori (JP-31) 🏴󠁪󠁰󠀰󠀳󠁿 Flag for Iwate (JP-03) 🏴󠁪󠁰󠀳󠀳󠁿 Flag for Okayama (JP-33) 🏴󠁪󠁰󠀱󠀷󠁿 Flag for Ishikawa (JP-17) 🏴󠁪󠁰󠀳󠀰󠁿 Flag for Wakayama (JP-30) 🏴󠁪󠁰󠀱󠀰󠁿 Flag for Gunma (JP-10) 🏴󠁪󠁯󠁭󠁡󠁿 Flag for Mafraq (JO-MA) 🏴󠁪󠁰󠀳󠀵󠁿 Flag for Yamaguchi (JP-35) 🏴󠁣󠁵󠀱󠀲󠁿 Flag for Granma (CU-12) 🏴󠁪󠁰󠀲󠀵󠁿 Flag for Shiga (JP-25) 🏴󠁪󠁰󠀰󠀲󠁿 Flag for Aomori (JP-02) 🏴󠁪󠁰󠀱󠀱󠁿 Flag for Saitama (JP-11) 🏴󠁪󠁰󠀲󠀹󠁿 Flag for Nara (JP-29) 🏴󠁪󠁰󠀱󠀹󠁿 Flag for Yamanashi (JP-19) 🏴󠁪󠁰󠀳󠀴󠁿 Flag for Hiroshima (JP-34) 🏴󠁪󠁯󠁭󠁮󠁿 Flag for Ma’an (JO-MN) 🏴󠁪󠁰󠀲󠀲󠁿 Flag for Shizuoka (JP-22) 🏴󠁪󠁰󠀲󠀷󠁿 Flag for Ōsaka (JP-27) 🏴󠁪󠁰󠀲󠀴󠁿 Flag for Mie (JP-24) 🏴󠁪󠁰󠀰󠀶󠁿 Flag for Yamagata (JP-06) 🏴󠁪󠁰󠀲󠀸󠁿 Flag for Hyōgo (JP-28) 🏴󠁪󠁯󠁫󠁡󠁿 Flag for Karak (JO-KA) 🏴󠁪󠁰󠀳󠀸󠁿 Flag for Ehime (JP-38) 🏴󠁪󠁰󠀱󠀴󠁿 Flag for Kanagawa (JP-14) 🏴󠁪󠁰󠀳󠀷󠁿 Flag for Kagawa (JP-37) 🏴󠁫󠁥󠀰󠀷󠁿 Flag for Garissa (KE-07) 🏴󠁫󠁥󠀲󠀴󠁿 Flag for Mandera (KE-24) 🏴󠁪󠁰󠀴󠀶󠁿 Flag for Kagoshima (JP-46) 🏴󠁫󠁥󠀱󠀷󠁿 Flag for Kisumu (KE-17) 🏴󠁫󠁥󠀱󠀴󠁿 Flag for Kilifi (KE-14) 🏴󠁫󠁥󠀱󠀵󠁿 Flag for Kirinyaga (KE-15) 🏴󠁫󠁥󠀱󠀰󠁿 Flag for Kajiado (KE-10) 🏴󠁫󠁥󠀰󠀳󠁿 Flag for Bungoma (KE-03) 🏴󠁫󠁥󠀳󠀲󠁿 Flag for Nandi (KE-32) 🏴󠁫󠁥󠀱󠀳󠁿 Flag for Kiambu (KE-13) 🏴󠁫󠁥󠀲󠀰󠁿 Flag for Laikipia (KE-20) 🏴󠁫󠁥󠀲󠀱󠁿 Flag for Lamu (KE-21) 🏴󠁪󠁰󠀴󠀰󠁿 Flag for Fukuoka (JP-40) 🏴󠁫󠁥󠀰󠀴󠁿 Flag for Busia (KE-04) 🏴󠁪󠁰󠀴󠀱󠁿 Flag for Saga (JP-41) 🏴󠁫󠁥󠀲󠀷󠁿 Flag for Migori (KE-27) 🏴󠁫󠁥󠀰󠀶󠁿 Flag for Embu (KE-06) 👩🏾‍👦🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁫󠁥󠀱󠀲󠁿 Flag for Kericho (KE-12) 🏴󠁫󠁥󠀰󠀹󠁿 Flag for Isiolo (KE-09) 🏴󠁫󠁥󠀱󠀹󠁿 Flag for Kwale (KE-19) 🏴󠁪󠁰󠀴󠀲󠁿 Flag for Nagasaki 
(JP-42) 🏴󠁫󠁥󠀳󠀰󠁿 Flag for Nairobi County (KE-30) 🏴󠁫󠁥󠀲󠀳󠁿 Flag for Makueni (KE-23) 🏴󠁫󠁥󠀲󠀹󠁿 Flag for Murang’a (KE-29) 🏴󠁪󠁰󠀳󠀹󠁿 Flag for Kōchi (JP-39) 🏴󠁫󠁥󠀰󠀲󠁿 Flag for Bomet (KE-02) 🏴󠁫󠁥󠀲󠀸󠁿 Flag for Mombasa (KE-28) 🏴󠁫󠁥󠀰󠀸󠁿 Flag for Homa Bay (KE-08) 🏴󠁫󠁥󠀱󠀱󠁿 Flag for Kakamega (KE-11) 🏴󠁫󠁥󠀲󠀲󠁿 Flag for Machakos (KE-22) 🏴󠁫󠁥󠀱󠀶󠁿 Flag for Kisii (KE-16) 🏴󠁫󠁥󠀰󠀵󠁿 Flag for Elgeyo-Marakwet (KE-05) 🏴󠁪󠁰󠀴󠀴󠁿 Flag for Ōita (JP-44) 🏴󠁫󠁥󠀳󠀳󠁿 Flag for Narok (KE-33) 🏴󠁫󠁥󠀲󠀶󠁿 Flag for Meru (KE-26) 🏴󠁪󠁰󠀴󠀳󠁿 Flag for Kumamoto (JP-43) 🏴󠁪󠁰󠀴󠀵󠁿 Flag for Miyazaki (JP-45) 🏴󠁫󠁨󠀱󠀹󠁿 Flag for Stung Treng (KH-19) 🏴󠁫󠁥󠀳󠀷󠁿 Flag for Samburu (KE-37) 🏴󠁫󠁥󠀴󠀷󠁿 Flag for West Pokot (KE-47) 🏴󠁫󠁥󠀳󠀹󠁿 Flag for Taita-Taveta (KE-39) 🏴󠁫󠁨󠀱󠀴󠁿 Flag for Prey Veng (KH-14) 🏴󠁫󠁥󠀴󠀱󠁿 Flag for Tharaka-Nithi (KE-41) 🏴󠁫󠁧󠁯󠁿 Flag for Osh Region (KG-O) 🏴󠁫󠁨󠀲󠀵󠁿 Flag for Tbong Khmum (KH-25) 🏴󠁫󠁧󠁴󠁿 Flag for Talas (KG-T) 🏴󠁫󠁨󠀱󠀲󠁿 Flag for Phnom Penh (KH-12) 🏴󠁫󠁧󠁧󠁢󠁿 Flag for Bishkek (KG-GB) 🏴󠁫󠁥󠀴󠀴󠁿 Flag for Uasin Gishu (KE-44) 🏴󠁫󠁨󠀲󠀳󠁿 Flag for Kep (KH-23) 🏴󠁫󠁨󠀱󠀰󠁿 Flag for Kratié (KH-10) 🏴󠁫󠁨󠀲󠀱󠁿 Flag for Takéo (KH-21) 🏴󠁫󠁨󠀲󠁿 Flag for Battambang (KH-2) 🏴󠁫󠁥󠀳󠀶󠁿 Flag for Nyeri (KE-36) 🏴󠁫󠁨󠀱󠀳󠁿 Flag for Preah Vihear (KH-13) 🏴󠁫󠁥󠀴󠀰󠁿 Flag for Tana River (KE-40) 🏴󠁫󠁨󠀲󠀴󠁿 Flag for Pailin (KH-24) 🏴󠁫󠁨󠀱󠀶󠁿 Flag for Ratanakiri (KH-16) 🏴󠁫󠁨󠀲󠀲󠁿 Flag for Oddar Meanchey (KH-22) 🏴󠁫󠁥󠀴󠀲󠁿 Flag for Trans Nzoia (KE-42) 🏴󠁫󠁨󠀱󠀸󠁿 Flag for Sihanoukville (KH-18) 🏴󠁫󠁥󠀴󠀵󠁿 Flag for Vihiga (KE-45) 🏴󠁫󠁧󠁧󠁯󠁿 Flag for Osh (KG-GO) 🏴󠁫󠁧󠁢󠁿 Flag for Batken (KG-B) 🏴󠁫󠁧󠁪󠁿 Flag for Jalal-Abad (KG-J) 🏴󠁫󠁨󠀱󠀱󠁿 Flag for Mondulkiri (KH-11) 🏴󠁫󠁨󠀱󠀷󠁿 Flag for Siem Reap (KH-17) 🏴󠁫󠁥󠀴󠀳󠁿 Flag for Turkana (KE-43) 🏴󠁫󠁨󠀱󠁿 Flag for Banteay Meanchey (KH-1) 🏴󠁫󠁧󠁮󠁿 Flag for Naryn (KG-N) 🏴󠁫󠁥󠀳󠀵󠁿 Flag for Nyandarua (KE-35) 🏴󠁫󠁥󠀳󠀸󠁿 Flag for Siaya (KE-38) 🏴󠁫󠁥󠀳󠀴󠁿 Flag for Nyamira (KE-34) 🏴󠁫󠁨󠀱󠀵󠁿 Flag for Pursat (KH-15) 🏴󠁫󠁥󠀴󠀶󠁿 Flag for Wajir (KE-46) 🏴󠁫󠁧󠁹󠁿 Flag for Issyk-Kul (KG-Y) 🏴󠁫󠁧󠁣󠁿 Flag for Chuy (KG-C) 🏴󠁫󠁭󠁭󠁿 Flag for Mohéli (KM-M) 🏴󠁫󠁲󠀱󠀱󠁿 Flag for Seoul (KR-11) 🏴󠁫󠁨󠀴󠁿 Flag for Kampong Chhnang (KH-4) 🏴󠁫󠁲󠀳󠀰󠁿 Flag for Daejeon (KR-30) 🏴󠁫󠁰󠀰󠀵󠁿 Flag for South 
Hwanghae (KP-05) 🏴󠁫󠁨󠀷󠁿 Flag for Kampot (KH-7) 🏴󠁫󠁮󠁮󠁿 Flag for Nevis (KN-N) 🏴󠁫󠁰󠀰󠀴󠁿 Flag for Chagang (KP-04) 🏴󠁫󠁲󠀴󠀶󠁿 Flag for South Jeolla (KR-46) 🏴󠁫󠁰󠀰󠀶󠁿 Flag for North Hwanghae (KP-06) 🏴󠁫󠁮󠁫󠁿 Flag for Saint Kitts (KN-K) 🏴󠁫󠁨󠀵󠁿 Flag for Kampong Speu (KH-5) 🏴󠁫󠁲󠀴󠀵󠁿 Flag for North Jeolla (KR-45) 🏴󠁫󠁰󠀰󠀳󠁿 Flag for North Pyongan (KP-03) 🏴󠁫󠁨󠀹󠁿 Flag for Koh Kong (KH-9) 🏴󠁫󠁰󠀰󠀷󠁿 Flag for Kangwon (KP-07) 🏴󠁫󠁲󠀲󠀶󠁿 Flag for Busan (KR-26) 🏴󠁫󠁲󠀲󠀹󠁿 Flag for Gwangju City (KR-29) 🏴󠁫󠁨󠀳󠁿 Flag for Kampong Cham (KH-3) 🏴󠁫󠁲󠀴󠀳󠁿 Flag for North Chungcheong (KR-43) 🏴󠁫󠁨󠀸󠁿 Flag for Kandal (KH-8) 🏴󠁫󠁨󠀶󠁿 Flag for Kampong Thom (KH-6) 🏴󠁫󠁰󠀱󠀰󠁿 Flag for Ryanggang (KP-10) 🏴󠁫󠁰󠀰󠀲󠁿 Flag for South Pyongan (KP-02) 🏴󠁫󠁭󠁧󠁿 Flag for Grande Comore (KM-G) 🏴󠁫󠁰󠀰󠀸󠁿 Flag for South Hamgyong (KP-08) 🏴󠁫󠁰󠀱󠀳󠁿 Flag for Rason (KP-13) 🏴󠁫󠁲󠀲󠀷󠁿 Flag for Daegu (KR-27) 🏴󠁫󠁲󠀲󠀸󠁿 Flag for Incheon (KR-28) 🏴󠁫󠁲󠀴󠀲󠁿 Flag for Gangwon (KR-42) 🏴󠁫󠁰󠀰󠀱󠁿 Flag for Pyongyang (KP-01) 🏴󠁫󠁲󠀳󠀱󠁿 Flag for Ulsan (KR-31) 🏴󠁫󠁲󠀴󠀴󠁿 Flag for South Chungcheong (KR-44) 🏴󠁫󠁭󠁡󠁿 Flag for Anjouan (KM-A) 🏴󠁫󠁲󠀴󠀱󠁿 Flag for Gyeonggi (KR-41) 🏴󠁫󠁲󠀴󠀷󠁿 Flag for North Gyeongsang (KR-47) 🏴󠁫󠁰󠀰󠀹󠁿 Flag for North Hamgyong (KP-09) 🏴󠁬󠁡󠁨󠁯󠁿 Flag for Houaphanh (LA-HO) 🏴󠁫󠁺󠁢󠁡󠁹󠁿 Flag for Bayqongyr (KZ-BAY) 🏴󠁬󠁡󠁣󠁨󠁿 Flag for Champasak (LA-CH) 🏴󠁬󠁡󠁶󠁴󠁿 Flag for Vientiane (LA-VT) 🏴󠁫󠁷󠁨󠁡󠁿 Flag for Hawalli (KW-HA) 🏴󠁬󠁡󠁰󠁨󠁿 Flag for Phongsaly (LA-PH) 🏴󠁫󠁺󠁰󠁡󠁶󠁿 Flag for Pavlodar (KZ-PAV) 🏴󠁫󠁺󠁡󠁬󠁭󠁿 Flag for Almaty Region (KZ-ALM) 🏴󠁫󠁷󠁫󠁵󠁿 Flag for Al Asimah (KW-KU) 🏴󠁬󠁡󠁢󠁫󠁿 Flag for Bokeo (LA-BK) 🏴󠁬󠁡󠁡󠁴󠁿 Flag for Attapeu (LA-AT) 🏴󠁫󠁺󠁡󠁫󠁴󠁿 Flag for Aktobe (KZ-AKT) 🏴󠁫󠁺󠁡󠁴󠁹󠁿 Flag for Atyrau (KZ-ATY) 🏴󠁫󠁷󠁪󠁡󠁿 Flag for Al Jahra (KW-JA) 🏴󠁬󠁡󠁢󠁬󠁿 Flag for Bolikhamsai (LA-BL) 🏴󠁬󠁡󠁯󠁵󠁿 Flag for Oudomxay (LA-OU) 🏴󠁫󠁺󠁭󠁡󠁮󠁿 Flag for Mangystau (KZ-MAN) 🏴󠁫󠁺󠁺󠁡󠁰󠁿 Flag for West Kazakhstan (KZ-ZAP) 🏴󠁫󠁺󠁺󠁨󠁡󠁿 Flag for Jambyl (KZ-ZHA) 🏴󠁫󠁺󠁡󠁳󠁴󠁿 Flag for Astana (KZ-AST) 🏴󠁬󠁡󠁬󠁰󠁿 Flag for Luang Prabang (LA-LP) 🏴󠁫󠁷󠁦󠁡󠁿 Flag for Al Farwaniyah (KW-FA) 🏴󠁫󠁺󠁫󠁵󠁳󠁿 Flag for Kostanay (KZ-KUS) 🏴󠁫󠁺󠁡󠁬󠁡󠁿 Flag for Almaty (KZ-ALA) 🏴󠁫󠁺󠁫󠁡󠁲󠁿 Flag for Karagandy (KZ-KAR) 🏴󠁫󠁺󠁫󠁺󠁹󠁿 Flag 
for Kyzylorda (KZ-KZY) 🏴󠁬󠁡󠁳󠁬󠁿 Flag for Salavan (LA-SL) 🏴󠁬󠁡󠁬󠁭󠁿 Flag for Luang Namtha (LA-LM) 🏴󠁫󠁲󠀵󠀰󠁿 Flag for Sejong (KR-50) 🏴󠁫󠁷󠁭󠁵󠁿 Flag for Mubarak Al-Kabeer (KW-MU) 🏴󠁫󠁺󠁳󠁥󠁶󠁿 Flag for North Kazakhstan (KZ-SEV) 👩🏿‍👦🏿‍👧🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁫󠁷󠁡󠁨󠁿 Flag for Al Ahmadi (KW-AH) 🏴󠁬󠁡󠁫󠁨󠁿 Flag for Khammouane (LA-KH) 🏴󠁫󠁺󠁡󠁫󠁭󠁿 Flag for Akmola (KZ-AKM) 🏴󠁫󠁺󠁹󠁵󠁺󠁿 Flag for South Kazakhstan (KZ-YUZ) 🏴󠁬󠁩󠀰󠀹󠁿 Flag for Triesen (LI-09) 👨🏽‍👨🏽‍👦🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏻‍👦🏻‍👶🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 🏴󠁬󠁫󠀷󠁿 Flag for North Central (LK-7) 🏴󠁬󠁡󠁸󠁡󠁿 Flag for Sainyabuli (LA-XA) 🏴󠁬󠁢󠁡󠁫󠁿 Flag for Akkar (LB-AK) 🏴󠁬󠁣󠀰󠀷󠁿 Flag for Laborie (LC-07) 🏴󠁬󠁣󠀰󠀶󠁿 Flag for Gros Islet (LC-06) 🏴󠁬󠁢󠁡󠁳󠁿 Flag for North (LB-AS) 🏴󠁬󠁩󠀰󠀱󠁿 Flag for Balzers (LI-01) 🏴󠁬󠁫󠀲󠁿 Flag for Central (LK-2) 🏴󠁬󠁩󠀰󠀴󠁿 Flag for Mauren (LI-04) 🏴󠁬󠁢󠁮󠁡󠁿 Flag for Nabatieh (LB-NA) 🏴󠁬󠁣󠀰󠀵󠁿 Flag for Dennery (LC-05) 🏴󠁬󠁢󠁪󠁡󠁿 Flag for South (LB-JA) 🏴󠁬󠁩󠀱󠀱󠁿 Flag for Vaduz (LI-11) 🏴󠁬󠁣󠀰󠀲󠁿 Flag for Castries (LC-02) 🏴󠁬󠁫󠀸󠁿 Flag for Uva (LK-8) 🏴󠁬󠁩󠀱󠀰󠁿 Flag for Triesenberg (LI-10) 🏴󠁬󠁩󠀰󠀵󠁿 Flag for Planken (LI-05) 🏴󠁬󠁣󠀱󠀱󠁿 Flag for Vieux Fort (LC-11) 🏴󠁬󠁢󠁢󠁨󠁿 Flag for Baalbek-Hermel (LB-BH) 🏴󠁬󠁫󠀶󠁿 Flag for North Western (LK-6) 🏴󠁬󠁩󠀰󠀶󠁿 Flag for Ruggell (LI-06) 🏴󠁬󠁣󠀰󠀸󠁿 Flag for Micoud (LC-08) 🏴󠁬󠁩󠀰󠀲󠁿 Flag for Eschen (LI-02) 🏴󠁬󠁣󠀱󠀲󠁿 Flag for Canaries (LC-12) 🏴󠁬󠁢󠁢󠁡󠁿 Flag for Beirut (LB-BA) 🏴󠁬󠁡󠁸󠁩󠁿 Flag for Xiangkhouang (LA-XI) 🏴󠁬󠁣󠀱󠀰󠁿 Flag for Soufrière (LC-10) 🏴󠁬󠁣󠀰󠀱󠁿 Flag for Anse la Raye (LC-01) 🏴󠁬󠁣󠀰󠀳󠁿 Flag for Choiseul (LC-03) 🏴󠁬󠁩󠀰󠀳󠁿 Flag for Gamprin (LI-03) 🏴󠁬󠁫󠀴󠁿 Flag for Northern (LK-4) 🏴󠁬󠁲󠁧󠁢󠁿 Flag for Grand Bassa (LR-GB) 🏴󠁬󠁲󠁧󠁰󠁿 Flag for Gbarpolu (LR-GP) 🏴󠁬󠁲󠁧󠁧󠁿 Flag for Grand Gedeh (LR-GG) 🏴󠁬󠁴󠀱󠀲󠁿 Flag for Jurbarkas (LT-12) 🏴󠁬󠁲󠁮󠁩󠁿 Flag for Nimba (LR-NI) 🏴󠁦󠁩󠀰󠀸󠁿 Flag for Central Finland (FI-08) 🏴󠁬󠁴󠀱󠀰󠁿 Flag for Jonava (LT-10) 🏴󠁬󠁲󠁭󠁧󠁿 Flag for Margibi (LR-MG) 🏴󠁬󠁲󠁳󠁩󠁿 Flag for Sinoe (LR-SI) 🏴󠁬󠁲󠁭󠁯󠁿 Flag for 
Montserrado (LR-MO) 🏴󠁬󠁴󠀱󠀶󠁿 Flag for Kaunas (LT-16) 🏴󠁬󠁳󠁫󠁿 Flag for Thaba-Tseka (LS-K) 🏴󠁬󠁴󠀰󠀵󠁿 Flag for Birštonas (LT-05) 🏴󠁬󠁳󠁦󠁿 Flag for Mohale’s Hoek (LS-F) 🏴󠁬󠁲󠁢󠁭󠁿 Flag for Bomi (LR-BM) 🏴󠁬󠁴󠀰󠀷󠁿 Flag for Druskininkai (LT-07) 🏴󠁬󠁴󠀱󠀴󠁿 Flag for Kalvarija (LT-14) 🏴󠁬󠁴󠀱󠀵󠁿 Flag for Kauno Municipality (LT-15) 🏴󠁬󠁳󠁨󠁿 Flag for Qacha’s Nek (LS-H) 🏴󠁬󠁴󠀰󠀴󠁿 Flag for Anykščiai (LT-04) 🏴󠁬󠁳󠁣󠁿 Flag for Leribe (LS-C) 🏴󠁬󠁴󠀱󠀱󠁿 Flag for Joniškis (LT-11) 🏴󠁬󠁲󠁬󠁯󠁿 Flag for Lofa (LR-LO) 🏴󠁬󠁲󠁲󠁩󠁿 Flag for Rivercess (LR-RI) 🏴󠁬󠁴󠀱󠀳󠁿 Flag for Kaišiadorys (LT-13) 🏴󠁬󠁴󠀰󠀸󠁿 Flag for Elektrėnai (LT-08) 🏴󠁬󠁲󠁧󠁫󠁿 Flag for Grand Kru (LR-GK) 🏴󠁬󠁳󠁤󠁿 Flag for Berea (LS-D) 🏴󠁬󠁳󠁧󠁿 Flag for Quthing (LS-G) 🏴󠁬󠁳󠁢󠁿 Flag for Butha-Buthe (LS-B) 🏴󠁬󠁴󠀰󠀱󠁿 Flag for Akmenė (LT-01) 🏴󠁬󠁴󠀰󠀹󠁿 Flag for Ignalina (LT-09) 🏴󠁬󠁳󠁥󠁿 Flag for Mafeteng (LS-E) 🏴󠁬󠁳󠁪󠁿 Flag for Mokhotlong (LS-J) 🏴󠁬󠁴󠀰󠀳󠁿 Flag for Alytus (LT-03) 🏴󠁬󠁴󠀰󠀶󠁿 Flag for Biržai (LT-06) 🏴󠁣󠁦󠁫󠁢󠁿 Flag for Nana-Grébizi (CF-KB) 🏴󠁬󠁲󠁲󠁧󠁿 Flag for River Gee (LR-RG) 🏴󠁬󠁴󠀵󠀴󠁿 Flag for Utena (LT-54) 🏴󠁬󠁴󠀲󠀷󠁿 Flag for Molėtai (LT-27) 🏴󠁬󠁴󠀴󠀱󠁿 Flag for Šakiai (LT-41) 🏴󠁬󠁴󠀱󠀹󠁿 Flag for Kelmė (LT-19) 🏴󠁬󠁴󠀲󠀳󠁿 Flag for Kupiškis (LT-23) 🏴󠁬󠁴󠀵󠀶󠁿 Flag for Vilkaviškis (LT-56) 🏴󠁬󠁴󠀲󠀸󠁿 Flag for Neringa (LT-28) 🏴󠁬󠁴󠀳󠀳󠁿 Flag for Panevėžys (LT-33) 🏴󠁬󠁴󠀲󠀹󠁿 Flag for Pagėgiai (LT-29) 🏴󠁬󠁴󠀴󠀳󠁿 Flag for Šiaulių Municipality (LT-43) 🏴󠁬󠁴󠀳󠀱󠁿 Flag for Palanga (LT-31) 🏴󠁬󠁴󠀱󠀸󠁿 Flag for Kėdainiai (LT-18) 🏴󠁬󠁴󠀴󠀰󠁿 Flag for Rokiškis (LT-40) 🏴󠁬󠁴󠀴󠀵󠁿 Flag for Šilalė (LT-45) 🏴󠁬󠁴󠀵󠀲󠁿 Flag for Trakai (LT-52) 🏴󠁦󠁭󠁰󠁮󠁩󠁿 Flag for Pohnpei (FM-PNI) 🏴󠁬󠁴󠀳󠀶󠁿 Flag for Prienai (LT-36) 🏴󠁬󠁴󠀵󠀱󠁿 Flag for Telšiai (LT-51) 🏴󠁬󠁴󠀲󠀱󠁿 Flag for Klaipėda (LT-21) 🏴󠁬󠁴󠀱󠀷󠁿 Flag for Kazlų Rūda (LT-17) 🏴󠁬󠁴󠀴󠀷󠁿 Flag for Širvintos (LT-47) 🏴󠁬󠁴󠀳󠀰󠁿 Flag for Pakruojis (LT-30) 🏴󠁬󠁴󠀴󠀴󠁿 Flag for Šiauliai (LT-44) 🏴󠁬󠁴󠀲󠀲󠁿 Flag for Kretinga (LT-22) 🏴󠁬󠁴󠀴󠀶󠁿 Flag for Šilutė (LT-46) 🏴󠁬󠁴󠀴󠀲󠁿 Flag for Šalčininkai (LT-42) 🏴󠁬󠁴󠀳󠀸󠁿 Flag for Raseiniai (LT-38) 🏴󠁬󠁴󠀵󠀵󠁿 Flag for Varėna (LT-55) 🏴󠁬󠁴󠀳󠀴󠁿 Flag for Pasvalys (LT-34) 🏴󠁬󠁴󠀳󠀵󠁿 Flag for Plungė (LT-35) 🏴󠁬󠁴󠀴󠀹󠁿 Flag for Švenčionys (LT-49) 🏴󠁬󠁴󠀳󠀷󠁿 Flag for 
Radviliškis (LT-37) 🏴󠁬󠁴󠀲󠀴󠁿 Flag for Lazdijai (LT-24) 🏴󠁬󠁴󠀵󠀰󠁿 Flag for Tauragė (LT-50) 🏴󠁬󠁴󠀴󠀸󠁿 Flag for Skuodas (LT-48) 🏴󠁬󠁴󠀵󠀳󠁿 Flag for Ukmergė (LT-53) 🏴󠁬󠁴󠀳󠀹󠁿 Flag for Rietavas (LT-39) 🏴󠁬󠁴󠀲󠀵󠁿 Flag for Marijampolė (LT-25) 🏴󠁬󠁴󠀲󠀶󠁿 Flag for Mažeikiai (LT-26) 🏴󠁬󠁶󠀰󠀱󠀳󠁿 Flag for Baldone (LV-013) 🏴󠁬󠁴󠁶󠁬󠁿 Flag for Vilnius County (LT-VL) 🏴󠁬󠁶󠀰󠀰󠀶󠁿 Flag for Alsunga (LV-006) 🏴󠁬󠁴󠀵󠀸󠁿 Flag for Vilnius (LT-58) 🏴󠁬󠁴󠁴󠁡󠁿 Flag for Tauragė County (LT-TA) 🏴󠁬󠁴󠁵󠁴󠁿 Flag for Utena County (LT-UT) 🏴󠁬󠁶󠀰󠀰󠀲󠁿 Flag for Aizkraukle (LV-002) 🏴󠁬󠁵󠁤󠁩󠁿 Flag for Diekirch (LU-DI) 🏴󠁬󠁴󠁭󠁲󠁿 Flag for Marijampolė County (LT-MR) 👩🏽‍👨🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁬󠁴󠁳󠁡󠁿 Flag for Šiauliai County (LT-SA) 🏴󠁬󠁵󠁥󠁣󠁿 Flag for Echternach (LU-EC) 🏴󠁬󠁵󠁲󠁤󠁿 Flag for Redange (LU-RD) 🏴󠁬󠁵󠁣󠁬󠁿 Flag for Clervaux (LU-CL) 🏴󠁬󠁴󠀵󠀹󠁿 Flag for Visaginas (LT-59) 🏴󠁬󠁶󠀰󠀰󠀹󠁿 Flag for Ape (LV-009) 🏴󠁬󠁶󠀰󠀰󠀸󠁿 Flag for Amata (LV-008) 🏴󠁬󠁴󠁡󠁬󠁿 Flag for Alytus County (LT-AL) 🏴󠁬󠁵󠁧󠁲󠁿 Flag for Grevenmacher (LU-GR) 🏴󠁬󠁶󠀰󠀰󠀱󠁿 Flag for Aglona (LV-001) 🏴󠁬󠁵󠁭󠁥󠁿 Flag for Mersch (LU-ME) 🏴󠁬󠁵󠁶󠁤󠁿 Flag for Vianden (LU-VD) 🏴󠁬󠁶󠀰󠀰󠀵󠁿 Flag for Aloja (LV-005) 🏴󠁬󠁢󠁪󠁬󠁿 Flag for Mount Lebanon (LB-JL) 🏴󠁬󠁴󠁫󠁵󠁿 Flag for Kaunas County (LT-KU) 🏴󠁬󠁴󠀶󠀰󠁿 Flag for Zarasai (LT-60) 🏴󠁬󠁵󠁷󠁩󠁿 Flag for Wiltz (LU-WI) 🏴󠁬󠁶󠀰󠀱󠀱󠁿 Flag for Ādaži (LV-011) 🏴󠁬󠁵󠁬󠁵󠁿 Flag for Luxembourg (LU-LU) 🏴󠁬󠁴󠁴󠁥󠁿 Flag for Telšiai County (LT-TE) 🏴󠁬󠁶󠀰󠀰󠀷󠁿 Flag for Alūksne (LV-007) 🏴󠁬󠁵󠁲󠁭󠁿 Flag for Remich (LU-RM) 🏴󠁬󠁶󠀰󠀰󠀴󠁿 Flag for Aknīste (LV-004) 🏴󠁬󠁵󠁥󠁳󠁿 Flag for Esch-sur-Alzette (LU-ES) 🏴󠁬󠁶󠀰󠀰󠀳󠁿 Flag for Aizpute (LV-003) 🏴󠁬󠁴󠁫󠁬󠁿 Flag for Klaipėda County (LT-KL) 🏴󠁬󠁶󠀰󠀲󠀷󠁿 Flag for Dundaga (LV-027) 🏴󠁬󠁶󠀰󠀴󠀰󠁿 Flag for Jaunpils (LV-040) 🏴󠁬󠁶󠀰󠀱󠀹󠁿 Flag for Burtnieki (LV-019) 🏴󠁬󠁶󠀰󠀱󠀵󠁿 Flag for Balvi (LV-015) 🏴󠁬󠁶󠀰󠀱󠀷󠁿 Flag for Beverīna (LV-017) 🏴󠁬󠁶󠀰󠀲󠀵󠁿 Flag for Daugavpils Municipality (LV-025) 🏴󠁬󠁶󠀰󠀲󠀱󠁿 Flag for Cesvaine (LV-021) 🏴󠁬󠁶󠀰󠀳󠀶󠁿 Flag for Ilūkste (LV-036) 🏴󠁬󠁶󠀰󠀵󠀰󠁿 Flag for Kuldīga (LV-050) 🏴󠁬󠁶󠀰󠀳󠀲󠁿 Flag for Grobiņa (LV-032) 🏴󠁬󠁶󠀰󠀳󠀳󠁿 Flag for Gulbene (LV-033) 🏴󠁬󠁶󠀰󠀴󠀳󠁿 Flag for Kandava (LV-043) 
🏴󠁬󠁶󠀰󠀱󠀸󠁿 Flag for Brocēni (LV-018) 🏴󠁬󠁶󠀰󠀴󠀸󠁿 Flag for Krimulda (LV-048) 🏴󠁬󠁶󠀰󠀲󠀰󠁿 Flag for Carnikava (LV-020) 🏴󠁬󠁶󠀰󠀴󠀹󠁿 Flag for Krustpils (LV-049) 👩🏾‍👨🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁬󠁶󠀰󠀲󠀶󠁿 Flag for Dobele (LV-026) 🏴󠁬󠁶󠀰󠀴󠀵󠁿 Flag for Kocēni (LV-045) 🏴󠁬󠁶󠀰󠀳󠀱󠁿 Flag for Garkalne (LV-031) 🏴󠁬󠁶󠀰󠀳󠀰󠁿 Flag for Ērgļi (LV-030) 🏴󠁬󠁶󠀰󠀲󠀸󠁿 Flag for Durbe (LV-028) 🏴󠁬󠁶󠀰󠀴󠀷󠁿 Flag for Krāslava (LV-047) 🏴󠁬󠁶󠀰󠀲󠀴󠁿 Flag for Dagda (LV-024) 🏴󠁬󠁶󠀰󠀳󠀸󠁿 Flag for Jaunjelgava (LV-038) 🏴󠁬󠁶󠀰󠀱󠀶󠁿 Flag for Bauska (LV-016) 🏴󠁬󠁶󠀰󠀱󠀴󠁿 Flag for Baltinava (LV-014) 🏴󠁬󠁶󠀰󠀴󠀲󠁿 Flag for Jēkabpils Municipality (LV-042) 🏴󠁬󠁶󠀰󠀳󠀹󠁿 Flag for Jaunpiebalga (LV-039) 🏴󠁬󠁶󠀰󠀲󠀲󠁿 Flag for Cēsis (LV-022) 🏴󠁬󠁶󠀰󠀳󠀴󠁿 Flag for Iecava (LV-034) 🏴󠁬󠁶󠀰󠀵󠀱󠁿 Flag for Ķegums (LV-051) 🏴󠁬󠁶󠀰󠀳󠀵󠁿 Flag for Ikšķile (LV-035) 🏴󠁬󠁶󠀰󠀲󠀳󠁿 Flag for Cibla (LV-023) 🏴󠁬󠁶󠀰󠀴󠀴󠁿 Flag for Kārsava (LV-044) 🏴󠁬󠁶󠀰󠀲󠀹󠁿 Flag for Engure (LV-029) 🏴󠁬󠁶󠀰󠀵󠀵󠁿 Flag for Līgatne (LV-055) 🏴󠁬󠁶󠀰󠀶󠀶󠁿 Flag for Nīca (LV-066) 🏴󠁬󠁶󠀰󠀶󠀱󠁿 Flag for Mālpils (LV-061) 🏴󠁧󠁥󠁫󠁫󠁿 Flag for Kvemo Kartli (GE-KK) 🏴󠁬󠁶󠀰󠀷󠀰󠁿 Flag for Pārgauja (LV-070) 🏴󠁬󠁶󠀰󠀵󠀳󠁿 Flag for Lielvārde (LV-053) 🏴󠁬󠁶󠀰󠀷󠀲󠁿 Flag for Pļaviņas (LV-072) 🏴󠁬󠁶󠀰󠀷󠀱󠁿 Flag for Pāvilosta (LV-071) 🏴󠁬󠁶󠀰󠀵󠀹󠁿 Flag for Madona (LV-059) 🏴󠁬󠁶󠀰󠀷󠀶󠁿 Flag for Rauna (LV-076) 🏴󠁬󠁶󠀰󠀵󠀴󠁿 Flag for Limbaži (LV-054) 🏴󠁬󠁶󠀰󠀶󠀴󠁿 Flag for Naukšēni (LV-064) 🏴󠁬󠁶󠀰󠀵󠀲󠁿 Flag for Ķekava (LV-052) 🏴󠁬󠁶󠀰󠀸󠀷󠁿 Flag for Salaspils (LV-087) 🏴󠁬󠁶󠀰󠀶󠀳󠁿 Flag for Mērsrags (LV-063) 🏴󠁬󠁶󠀰󠀶󠀸󠁿 Flag for Olaine (LV-068) 🏴󠁬󠁶󠀰󠀷󠀹󠁿 Flag for Roja (LV-079) 🏴󠁬󠁶󠀰󠀸󠀱󠁿 Flag for Rucava (LV-081) 🏴󠁬󠁶󠀰󠀸󠀲󠁿 Flag for Rugāji (LV-082) 🏴󠁬󠁶󠀰󠀶󠀷󠁿 Flag for Ogre (LV-067) 🏴󠁬󠁶󠀰󠀸󠀴󠁿 Flag for Rūjiena (LV-084) 🏴󠁬󠁶󠀰󠀸󠀹󠁿 Flag for Saulkrasti (LV-089) 🏴󠁬󠁶󠀰󠀸󠀸󠁿 Flag for Saldus (LV-088) 🏴󠁬󠁶󠀰󠀸󠀳󠁿 Flag for Rundāle (LV-083) 🏴󠁬󠁶󠀰󠀶󠀵󠁿 Flag for Nereta (LV-065) 🏴󠁬󠁶󠀰󠀶󠀹󠁿 Flag for Ozolnieki (LV-069) 🏴󠁬󠁶󠀰󠀸󠀰󠁿 Flag for Ropaži (LV-080) 🏴󠁬󠁶󠀰󠀷󠀸󠁿 Flag for Riebiņi (LV-078) 🏴󠁬󠁶󠀰󠀵󠀶󠁿 Flag for Līvāni (LV-056) 🏴󠁬󠁶󠀰󠀷󠀵󠁿 Flag for Priekuļi (LV-075) 🏴󠁬󠁶󠀰󠀵󠀸󠁿 Flag for Ludza (LV-058) 🏴󠁬󠁶󠀰󠀹󠀰󠁿 Flag for Sēja (LV-090) 🏴󠁬󠁶󠀰󠀷󠀴󠁿 Flag 
for Priekule (LV-074) 🏴󠁬󠁶󠀰󠀵󠀷󠁿 Flag for Lubāna (LV-057) 🏴󠁬󠁶󠀰󠀸󠀶󠁿 Flag for Salacgrīva (LV-086) 🏴󠁬󠁶󠀰󠀶󠀲󠁿 Flag for Mārupe (LV-062) 🏴󠁬󠁶󠀰󠀷󠀳󠁿 Flag for Preiļi (LV-073) 🏴󠁬󠁶󠀱󠀰󠀷󠁿 Flag for Viesīte (LV-107) 🏴󠁬󠁶󠀰󠀹󠀴󠁿 Flag for Smiltene (LV-094) 🏴󠁬󠁹󠁫󠁦󠁿 Flag for Kufra (LY-KF) 🏴󠁬󠁶󠁤󠁧󠁶󠁿 Flag for Daugavpils (LV-DGV) 🏴󠁬󠁶󠀰󠀹󠀹󠁿 Flag for Tukums (LV-099) 👩🏿‍👨🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁬󠁶󠁬󠁰󠁸󠁿 Flag for Liepāja (LV-LPX) 🏴󠁬󠁶󠀱󠀰󠀱󠁿 Flag for Valka (LV-101) 🏴󠁬󠁶󠀱󠀰󠀳󠁿 Flag for Vārkava (LV-103) 🏴󠁬󠁹󠁭󠁢󠁿 Flag for Murqub (LY-MB) 🏴󠁬󠁶󠁶󠁥󠁮󠁿 Flag for Ventspils (LV-VEN) 🏴󠁬󠁹󠁪󠁡󠁿 Flag for Jabal al Akhdar (LY-JA) 🏴󠁬󠁶󠁪󠁫󠁢󠁿 Flag for Jēkabpils (LV-JKB) 🏴󠁬󠁶󠀰󠀹󠀱󠁿 Flag for Sigulda (LV-091) 🏴󠁬󠁹󠁪󠁧󠁿 Flag for Jabal al Gharbi (LY-JG) 🏴󠁬󠁹󠁧󠁴󠁿 Flag for Ghat (LY-GT) 🏴󠁬󠁶󠀰󠀹󠀵󠁿 Flag for Stopiņi (LV-095) 🏴󠁬󠁶󠁲󠁩󠁸󠁿 Flag for Riga (LV-RIX) 🏴󠁬󠁹󠁤󠁲󠁿 Flag for Derna (LY-DR) 🏴󠁬󠁶󠀱󠀰󠀰󠁿 Flag for Vaiņode (LV-100) 🏴󠁬󠁶󠀱󠀰󠀲󠁿 Flag for Varakļāni (LV-102) 🏴󠁬󠁶󠁪󠁥󠁬󠁿 Flag for Jelgava (LV-JEL) 🏴󠁬󠁶󠀰󠀹󠀲󠁿 Flag for Skrīveri (LV-092) 🏴󠁬󠁶󠀰󠀹󠀷󠁿 Flag for Talsi (LV-097) 🏴󠁬󠁶󠁶󠁭󠁲󠁿 Flag for Valmiera (LV-VMR) 🏴󠁬󠁹󠁢󠁡󠁿 Flag for Benghazi (LY-BA) 🏴󠁬󠁶󠁲󠁥󠁺󠁿 Flag for Rēzekne (LV-REZ) 🏴󠁬󠁶󠀰󠀹󠀳󠁿 Flag for Skrunda (LV-093) 🏴󠁬󠁶󠀱󠀱󠀰󠁿 Flag for Zilupe (LV-110) 🏴󠁬󠁶󠀰󠀹󠀶󠁿 Flag for Strenči (LV-096) 🏴󠁬󠁹󠁪󠁵󠁿 Flag for Jufra (LY-JU) 🏴󠁬󠁶󠀱󠀰󠀴󠁿 Flag for Vecpiebalga (LV-104) 🏴󠁬󠁶󠀱󠀰󠀵󠁿 Flag for Vecumnieki (LV-105) 🏴󠁬󠁶󠀱󠀰󠀸󠁿 Flag for Viļaka (LV-108) 🏴󠁬󠁶󠁪󠁵󠁲󠁿 Flag for Jūrmala (LV-JUR) 🏴󠁬󠁶󠀱󠀰󠀹󠁿 Flag for Viļāni (LV-109) 🏴󠁬󠁶󠀰󠀹󠀸󠁿 Flag for Tērvete (LV-098) 🏴󠁭󠁡󠀰󠀸󠁿 Flag for Grand Casablanca (MA-08) 🏴󠁬󠁹󠁭󠁪󠁿 Flag for Marj (LY-MJ) 🏴󠁬󠁹󠁷󠁡󠁿 Flag for Al Wahat (LY-WA) 🏴󠁭󠁣󠁭󠁣󠁿 Flag for Monte Carlo (MC-MC) 🏴󠁭󠁡󠀱󠀴󠁿 Flag for Guelmim-Es Semara (MA-14) 🏴󠁬󠁹󠁺󠁡󠁿 Flag for Zawiya (LY-ZA) 🏴󠁭󠁡󠀰󠀲󠁿 Flag for Gharb-Chrarda-Béni Hssen (MA-02) 🏴󠁭󠁡󠀱󠀱󠁿 Flag for Marrakesh-Tensift-El Haouz (MA-11) 🏴󠁭󠁡󠀱󠀰󠁿 Flag for Doukkala-Abda (MA-10) 👩🏽‍👩🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁭󠁡󠀰󠀷󠁿 Flag for Rabat-Salé-Zemmour-Zaer (MA-07) 🏴󠁭󠁡󠀱󠀶󠁿 Flag for Oued Ed-Dahab-Lagouira (MA-16) 
🏴󠁬󠁹󠁮󠁬󠁿 Flag for Nalut (LY-NL) 🏴󠁬󠁹󠁳󠁢󠁿 Flag for Sabha (LY-SB) 🏴󠁭󠁡󠀰󠀳󠁿 Flag for Taza-Al Hoceima-Taounate (MA-03) 🏴󠁭󠁣󠁪󠁥󠁿 Flag for Jardin Exotique de Monaco (MC-JE) 🏴󠁬󠁹󠁷󠁳󠁿 Flag for Wadi al Shatii (LY-WS) 🏴󠁭󠁣󠁬󠁡󠁿 Flag for Larvotto (MC-LA) 🏴󠁬󠁹󠁮󠁱󠁿 Flag for Nuqat al Khams (LY-NQ) 🏴󠁭󠁣󠁭󠁡󠁿 Flag for Malbousquet (MC-MA) 🏴󠁭󠁡󠀱󠀲󠁿 Flag for Tadla-Azilal (MA-12) 🏴󠁭󠁣󠁣󠁯󠁿 Flag for La Condamine (MC-CO) 🏴󠁭󠁣󠁭󠁯󠁿 Flag for Monaco-Ville (MC-MO) 🏴󠁭󠁡󠀰󠀹󠁿 Flag for Chaouia-Ouardigha (MA-09) 🏴󠁭󠁡󠀰󠀱󠁿 Flag for Tangier-Tétouan (MA-01) 🏴󠁭󠁣󠁭󠁧󠁿 Flag for Moneghetti (MC-MG) 🏴󠁬󠁹󠁭󠁱󠁿 Flag for Murzuq (LY-MQ) 🏴󠁭󠁡󠀰󠀶󠁿 Flag for Meknès-Tafilalet (MA-06) 🏴󠁭󠁣󠁦󠁯󠁿 Flag for Fontvieille (MC-FO) 🏴󠁬󠁹󠁷󠁤󠁿 Flag for Wadi al Hayaa (LY-WD) 🏴󠁭󠁣󠁣󠁬󠁿 Flag for La Colle (MC-CL) 🏴󠁬󠁹󠁳󠁲󠁿 Flag for Sirte (LY-SR) 🏴󠁬󠁹󠁭󠁩󠁿 Flag for Misrata (LY-MI) 🏴󠁭󠁡󠀰󠀵󠁿 Flag for Fès-Boulemane (MA-05) 🏴󠁬󠁹󠁴󠁢󠁿 Flag for Tripoli (LY-TB) 🏴󠁭󠁣󠁧󠁡󠁿 Flag for La Gare (MC-GA) 👩🏾‍👩🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁭󠁤󠁥󠁤󠁿 Flag for Edineț (MD-ED) 🏴󠁭󠁤󠁨󠁩󠁿 Flag for Hîncești (MD-HI) 🏴󠁭󠁤󠁦󠁡󠁿 Flag for Fălești (MD-FA) 🏴󠁭󠁤󠁣󠁲󠁿 Flag for Criuleni (MD-CR) 🏴󠁭󠁤󠁳󠁩󠁿 Flag for Sîngerei (MD-SI) 🏴󠁭󠁤󠁳󠁯󠁿 Flag for Soroca (MD-SO) 🏴󠁭󠁤󠁣󠁴󠁿 Flag for Cantemir (MD-CT) 🏴󠁭󠁤󠁲󠁥󠁿 Flag for Rezina (MD-RE) 🏴󠁭󠁤󠁳󠁤󠁿 Flag for Șoldănești (MD-SD) 🏴󠁭󠁤󠁢󠁲󠁿 Flag for Briceni (MD-BR) 🏴󠁭󠁣󠁶󠁲󠁿 Flag for Vallon de la Rousse (MC-VR) 🏴󠁭󠁤󠁢󠁡󠁿 Flag for Bălţi (MD-BA) 🏴󠁭󠁤󠁤󠁵󠁿 Flag for Dubăsari (MD-DU) 🏴󠁭󠁤󠁣󠁬󠁿 Flag for Călărași (MD-CL) 🏴󠁭󠁣󠁳󠁰󠁿 Flag for Spélugues (MC-SP) 🏴󠁭󠁤󠁣󠁡󠁿 Flag for Cahul (MD-CA) 🏴󠁭󠁤󠁩󠁡󠁿 Flag for Ialoveni (MD-IA) 🏴󠁭󠁤󠁯󠁲󠁿 Flag for Orhei (MD-OR) 🏴󠁭󠁤󠁤󠁲󠁿 Flag for Drochia (MD-DR) 🏴󠁭󠁤󠁧󠁡󠁿 Flag for Gagauzia (MD-GA) 🏴󠁭󠁤󠁣󠁭󠁿 Flag for Cimișlia (MD-CM) 🏴󠁭󠁤󠁯󠁣󠁿 Flag for Ocniţa (MD-OC) 🏴󠁭󠁤󠁢󠁳󠁿 Flag for Basarabeasca (MD-BS) 🏴󠁭󠁤󠁳󠁴󠁿 Flag for Strășeni (MD-ST) 🏴󠁭󠁤󠁡󠁮󠁿 Flag for Anenii Noi (MD-AN) 🏴󠁭󠁣󠁭󠁵󠁿 Flag for Moulins (MC-MU) 🏴󠁭󠁤󠁢󠁤󠁿 Flag for Bender (MD-BD) 🏴󠁭󠁤󠁧󠁬󠁿 Flag for Glodeni (MD-GL) 🏴󠁭󠁣󠁳󠁯󠁿 Flag for La Source (MC-SO) 🏴󠁭󠁤󠁣󠁵󠁿 Flag for Chișinău (MD-CU) 🏴󠁭󠁤󠁤󠁯󠁿 Flag for Dondușeni 
(MD-DO) 🏴󠁭󠁤󠁦󠁬󠁿 Flag for Florești (MD-FL) 🏴󠁭󠁣󠁰󠁨󠁿 Flag for Port Hercules (MC-PH) 🏴󠁭󠁤󠁮󠁩󠁿 Flag for Nisporeni (MD-NI) 🏴󠁭󠁤󠁲󠁩󠁿 Flag for Rîșcani (MD-RI) 🏴󠁭󠁤󠁬󠁥󠁿 Flag for Leova (MD-LE) 🏴󠁭󠁤󠁳󠁶󠁿 Flag for Ştefan Vodă (MD-SV) 🏴󠁭󠁤󠁵󠁮󠁿 Flag for Ungheni (MD-UN) 🏴󠁭󠁧󠁡󠁿 Flag for Toamasina (MG-A) 🏴󠁭󠁧󠁴󠁿 Flag for Antananarivo (MG-T) 🏴󠁭󠁥󠀰󠀶󠁿 Flag for Cetinje (ME-06) 🏴󠁭󠁫󠀰󠀵󠁿 Flag for Bogdanci (MK-05) 🏴󠁭󠁥󠀲󠀰󠁿 Flag for Ulcinj (ME-20) 🏴󠁭󠁥󠀰󠀹󠁿 Flag for Kolašin (ME-09) 🏴󠁭󠁫󠀰󠀷󠁿 Flag for Bosilovo (MK-07) 🏴󠁭󠁥󠀱󠀴󠁿 Flag for Pljevlja (ME-14) 🏴󠁭󠁤󠁴󠁥󠁿 Flag for Telenești (MD-TE) 🏴󠁭󠁫󠀰󠀶󠁿 Flag for Bogovinje (MK-06) 🏴󠁭󠁥󠀲󠀱󠁿 Flag for Žabljak (ME-21) 🏴󠁭󠁥󠀰󠀸󠁿 Flag for Herceg Novi (ME-08) 🏴󠁭󠁥󠀲󠀳󠁿 Flag for Petnjica (ME-23) 🏴󠁭󠁥󠀱󠀷󠁿 Flag for Rožaje (ME-17) 🏴󠁭󠁥󠀰󠀵󠁿 Flag for Budva (ME-05) 🏴󠁭󠁥󠀰󠀲󠁿 Flag for Bar (ME-02) 🏴󠁭󠁫󠀰󠀳󠁿 Flag for Berovo (MK-03) 🏴󠁭󠁥󠀱󠀹󠁿 Flag for Tivat (ME-19) 🏴󠁭󠁥󠀱󠀵󠁿 Flag for Plužine (ME-15) 🏴󠁭󠁥󠀱󠀰󠁿 Flag for Kotor (ME-10) 🏴󠁭󠁨󠁬󠁿 Flag for Ralik Chain (MH-L) 🏴󠁭󠁥󠀰󠀷󠁿 Flag for Danilovgrad (ME-07) 🏴󠁭󠁥󠀱󠀳󠁿 Flag for Plav (ME-13) 🏴󠁭󠁫󠀰󠀴󠁿 Flag for Bitola (MK-04) 🏴󠁭󠁥󠀰󠀴󠁿 Flag for Bijelo Polje (ME-04) 🏴󠁭󠁥󠀰󠀱󠁿 Flag for Andrijevica (ME-01) 👩🏿‍👩🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁭󠁥󠀱󠀲󠁿 Flag for Nikšić (ME-12) 🏴󠁭󠁤󠁴󠁡󠁿 Flag for Taraclia (MD-TA) 🏴󠁭󠁥󠀱󠀱󠁿 Flag for Mojkovac (ME-11) 🏴󠁭󠁧󠁭󠁿 Flag for Mahajanga (MG-M) 🏴󠁭󠁥󠀲󠀲󠁿 Flag for Gusinje (ME-22) 🏴󠁭󠁧󠁦󠁿 Flag for Fianarantsoa (MG-F) 🏴󠁭󠁥󠀱󠀸󠁿 Flag for Šavnik (ME-18) 🏴󠁭󠁥󠀱󠀶󠁿 Flag for Podgorica (ME-16) 🏴󠁭󠁧󠁵󠁿 Flag for Toliara (MG-U) 🏴󠁭󠁧󠁤󠁿 Flag for Antsiranana (MG-D) 🏴󠁭󠁫󠀴󠀳󠁿 Flag for Kratovo (MK-43) 🏴󠁭󠁫󠀴󠀴󠁿 Flag for Kriva Palanka (MK-44) 🏴󠁭󠁫󠀵󠀲󠁿 Flag for Makedonski Brod (MK-52) 🏴󠁭󠁫󠀳󠀵󠁿 Flag for Jegunovce (MK-35) 🏴󠁭󠁫󠀴󠀹󠁿 Flag for Lozovo (MK-49) 🏴󠁭󠁫󠀴󠀷󠁿 Flag for Kumanovo (MK-47) 🏴󠁭󠁫󠀱󠀲󠁿 Flag for Vevčani (MK-12) 🏴󠁭󠁫󠀲󠀴󠁿 Flag for Demir Kapija (MK-24) 🏴󠁭󠁫󠀱󠀱󠁿 Flag for Vasilevo (MK-11) 🏴󠁭󠁫󠀳󠀰󠁿 Flag for Želino (MK-30) 🏴󠁭󠁫󠀳󠀶󠁿 Flag for Kavadarci (MK-36) 🏴󠁭󠁫󠀳󠀲󠁿 Flag for Zelenikovo (MK-32) 🏴󠁭󠁫󠀴󠀱󠁿 Flag for Konče (MK-41) 🏴󠁭󠁫󠀱󠀴󠁿 Flag for Vinica (MK-14) 🏴󠁭󠁫󠀱󠀰󠁿 Flag for Valandovo 
(MK-10) 🏴󠁭󠁫󠀵󠀵󠁿 Flag for Novaci (MK-55) 🏴󠁭󠁫󠀵󠀶󠁿 Flag for Novo Selo (MK-56) 🏴󠁭󠁫󠀳󠀴󠁿 Flag for Ilinden (MK-34) 🏴󠁭󠁫󠀵󠀱󠁿 Flag for Makedonska Kamenica (MK-51) 🏴󠁭󠁫󠀱󠀶󠁿 Flag for Vrapčište (MK-16) 🏴󠁭󠁫󠀰󠀸󠁿 Flag for Brvenica (MK-08) 🏴󠁭󠁫󠀲󠀰󠁿 Flag for Gradsko (MK-20) 🏴󠁭󠁫󠀵󠀰󠁿 Flag for Mavrovo and Rostuša (MK-50) 🏴󠁭󠁫󠀲󠀲󠁿 Flag for Debarca (MK-22) 🏴󠁭󠁫󠀱󠀹󠁿 Flag for Gostivar (MK-19) 🏴󠁭󠁫󠀵󠀳󠁿 Flag for Mogila (MK-53) 🏴󠁭󠁫󠀴󠀸󠁿 Flag for Lipkovo (MK-48) 🏴󠁭󠁫󠀳󠀷󠁿 Flag for Karbinci (MK-37) 🏴󠁭󠁫󠀳󠀳󠁿 Flag for Zrnovci (MK-33) 🏴󠁭󠁫󠀵󠀴󠁿 Flag for Negotino (MK-54) 🏴󠁭󠁫󠀴󠀰󠁿 Flag for Kičevo (MK-40) 🏴󠁭󠁫󠀲󠀱󠁿 Flag for Debar (MK-21) 🏴󠁭󠁫󠀱󠀳󠁿 Flag for Veles (MK-13) 🏴󠁭󠁫󠀲󠀶󠁿 Flag for Dojran (MK-26) 🏴󠁭󠁫󠀱󠀸󠁿 Flag for Gevgelija (MK-18) 🏴󠁭󠁫󠀴󠀲󠁿 Flag for Kočani (MK-42) 🏴󠁭󠁫󠀴󠀵󠁿 Flag for Krivogaštani (MK-45) 🏴󠁭󠁫󠀲󠀳󠁿 Flag for Delčevo (MK-23) 🏴󠁭󠁫󠀴󠀶󠁿 Flag for Kruševo (MK-46) 🏴󠁭󠁫󠀸󠀲󠁿 Flag for Čučer-Sandevo (MK-82) 🏴󠁭󠁫󠀶󠀲󠁿 Flag for Prilep (MK-62) 🏴󠁭󠁫󠀷󠀸󠁿 Flag for Centar Župa (MK-78) 🏴󠁭󠁭󠀰󠀴󠁿 Flag for Mandalay (MM-04) 🏴󠁭󠁬󠀴󠁿 Flag for Ségou (ML-4) 🏴󠁭󠁫󠀵󠀹󠁿 Flag for Petrovec (MK-59) 🏴󠁭󠁫󠀸󠀱󠁿 Flag for Češinovo-Obleševo (MK-81) 🏴󠁭󠁬󠀸󠁿 Flag for Kidal (ML-8) 🏴󠁭󠁭󠀰󠀲󠁿 Flag for Bago (MM-02) 🏴󠁭󠁫󠀷󠀲󠁿 Flag for Struga (MK-72) 🏴󠁭󠁫󠀷󠀵󠁿 Flag for Tearce (MK-75) 🏴󠁭󠁫󠀷󠀴󠁿 Flag for Studeničani (MK-74) 🏴󠁭󠁫󠀵󠀸󠁿 Flag for Ohrid (MK-58) 🏴󠁭󠁫󠀶󠀹󠁿 Flag for Sveti Nikole (MK-69) 🏴󠁭󠁫󠀷󠀳󠁿 Flag for Strumica (MK-73) 🏴󠁭󠁬󠀳󠁿 Flag for Sikasso (ML-3) 🏴󠁭󠁭󠀱󠀱󠁿 Flag for Kachin (MM-11) 🏴󠁭󠁫󠀶󠀶󠁿 Flag for Resen (MK-66) 🏴󠁭󠁬󠁢󠁫󠁯󠁿 Flag for Bamako (ML-BKO) 🏴󠁭󠁭󠀰󠀳󠁿 Flag for Magway (MM-03) 🏴󠁭󠁫󠀷󠀰󠁿 Flag for Sopište (MK-70) 🏴󠁭󠁫󠀷󠀱󠁿 Flag for Staro Nagoričane (MK-71) 🏴󠁭󠁭󠀰󠀷󠁿 Flag for Ayeyarwady (MM-07) 🏴󠁭󠁬󠀷󠁿 Flag for Gao (ML-7) 🏴󠁭󠁬󠀵󠁿 Flag for Mopti (ML-5) 🏴󠁭󠁫󠀸󠀳󠁿 Flag for Štip (MK-83) 🏴󠁭󠁭󠀱󠀲󠁿 Flag for Kayah (MM-12) 🏴󠁭󠁭󠀰󠀵󠁿 Flag for Tanintharyi (MM-05) 🏴󠁭󠁬󠀲󠁿 Flag for Koulikoro (ML-2) 🏴󠁭󠁫󠀶󠀳󠁿 Flag for Probištip (MK-63) 🏴󠁭󠁫󠀶󠀰󠁿 Flag for Pehčevo (MK-60) 🏴󠁭󠁭󠀰󠀱󠁿 Flag for Sagaing (MM-01) 🏴󠁭󠁫󠀸󠀰󠁿 Flag for Čaška (MK-80) 🏴󠁭󠁫󠀶󠀵󠁿 Flag for Rankovce (MK-65) 🏴󠁭󠁭󠀰󠀶󠁿 Flag for Yangon (MM-06) 🏴󠁭󠁫󠀷󠀶󠁿 Flag for Tetovo (MK-76) 🏴󠁭󠁫󠀶󠀷󠁿 Flag for Rosoman 
(MK-67) 🏴󠁭󠁲󠀰󠀳󠁿 Flag for Assaba (MR-03) 🏴󠁭󠁭󠀱󠀷󠁿 Flag for Shan (MM-17) 🏴󠁭󠁭󠀱󠀶󠁿 Flag for Rakhine (MM-16) 🏴󠁭󠁮󠀰󠀴󠀱󠁿 Flag for Khövsgöl (MN-041) 🏴󠁭󠁮󠀰󠀷󠀱󠁿 Flag for Bayan-Ölgii (MN-071) 🏴󠁭󠁮󠀰󠀶󠀹󠁿 Flag for Bayankhongor (MN-069) 🏴󠁭󠁮󠀰󠀶󠀱󠁿 Flag for Dornod (MN-061) 🏴󠁭󠁮󠀰󠀴󠀹󠁿 Flag for Selenge (MN-049) 🏴󠁭󠁮󠀱󠁿 Flag for Ulaanbaatar (MN-1) 🏴󠁭󠁮󠀰󠀳󠀷󠁿 Flag for Darkhan-Uul (MN-037) 🏴󠁭󠁮󠀰󠀴󠀷󠁿 Flag for Töv (MN-047) 🏴󠁭󠁭󠀱󠀵󠁿 Flag for Mon (MM-15) 🏴󠁭󠁲󠀰󠀶󠁿 Flag for Trarza (MR-06) 🏴󠁭󠁮󠀰󠀵󠀱󠁿 Flag for Sükhbaatar (MN-051) 🏴󠁭󠁲󠀰󠀴󠁿 Flag for Gorgol (MR-04) 🏴󠁭󠁮󠀰󠀵󠀵󠁿 Flag for Övörkhangai (MN-055) 🏴󠁭󠁭󠀱󠀴󠁿 Flag for Chin (MM-14) 🏴󠁭󠁮󠀰󠀶󠀷󠁿 Flag for Bulgan (MN-067) 🏴󠁭󠁮󠀰󠀵󠀷󠁿 Flag for Zavkhan (MN-057) 🏴󠁭󠁮󠀰󠀶󠀳󠁿 Flag for Dornogovi (MN-063) 🏴󠁭󠁮󠀰󠀵󠀳󠁿 Flag for Ömnögovi (MN-053) 🏴󠁭󠁭󠀱󠀳󠁿 Flag for Kayin (MM-13) 🏴󠁭󠁮󠀰󠀶󠀵󠁿 Flag for Govi-Altai (MN-065) 🏴󠁭󠁲󠀱󠀱󠁿 Flag for Tiris Zemmour (MR-11) 🏴󠁭󠁮󠀰󠀵󠀹󠁿 Flag for Dundgovi (MN-059) 🏴󠁭󠁮󠀰󠀷󠀳󠁿 Flag for Arkhangai (MN-073) 🏴󠁭󠁲󠀰󠀹󠁿 Flag for Tagant (MR-09) 🏴󠁭󠁮󠀰󠀴󠀳󠁿 Flag for Khovd (MN-043) 🏴󠁭󠁮󠀰󠀴󠀶󠁿 Flag for Uvs (MN-046) 🏴󠁭󠁮󠀰󠀶󠀴󠁿 Flag for Govisümber (MN-064) 🏴󠁭󠁲󠀰󠀵󠁿 Flag for Brakna (MR-05) 🏴󠁭󠁲󠀰󠀸󠁿 Flag for Dakhlet Nouadhibou (MR-08) 🏴󠁭󠁲󠀰󠀱󠁿 Flag for Hodh Ech Chargui (MR-01) 🏴󠁭󠁮󠀰󠀳󠀵󠁿 Flag for Orkhon (MN-035) 🏴󠁭󠁲󠀰󠀲󠁿 Flag for Hodh El Gharbi (MR-02) 🏴󠁭󠁭󠀱󠀸󠁿 Flag for Naypyidaw (MM-18) 🏴󠁭󠁲󠀰󠀷󠁿 Flag for Adrar (MR-07) 🏴󠁭󠁲󠀱󠀲󠁿 Flag for Inchiri (MR-12) 🏴󠁭󠁴󠀱󠀹󠁿 Flag for Iklin (MT-19) 🏴󠁭󠁴󠀱󠀴󠁿 Flag for Għarb (MT-14) 🏴󠁭󠁴󠀳󠀳󠁿 Flag for Mqabba (MT-33) 🏴󠁭󠁴󠀲󠀲󠁿 Flag for Kerċem (MT-22) 🏴󠁭󠁴󠀱󠀶󠁿 Flag for Għasri (MT-16) 🏴󠁭󠁴󠀲󠀴󠁿 Flag for Lija (MT-24) 🏴󠁭󠁴󠀰󠀵󠁿 Flag for Birżebbuġa (MT-05) 🏴󠁭󠁴󠀰󠀴󠁿 Flag for Birkirkara (MT-04) 🏴󠁭󠁴󠀳󠀱󠁿 Flag for Mġarr (MT-31) 🏴󠁭󠁴󠀰󠀲󠁿 Flag for Balzan (MT-02) 🏴󠁭󠁴󠀳󠀶󠁿 Flag for Munxar (MT-36) 🏴󠁭󠁴󠀱󠀳󠁿 Flag for Għajnsielem (MT-13) 🏴󠁭󠁴󠀳󠀸󠁿 Flag for Naxxar (MT-38) 🏴󠁭󠁴󠀰󠀹󠁿 Flag for Floriana (MT-09) 🏴󠁭󠁴󠀲󠀶󠁿 Flag for Marsa (MT-26) 🏴󠁭󠁴󠀰󠀷󠁿 Flag for Dingli (MT-07) 🏴󠁭󠁴󠀱󠀱󠁿 Flag for Gudja (MT-11) 🏴󠁭󠁴󠀲󠀳󠁿 Flag for Kirkop (MT-23) 🏴󠁭󠁴󠀲󠀷󠁿 Flag for Marsaskala (MT-27) 🏴󠁭󠁴󠀳󠀹󠁿 Flag for Paola (MT-39) 🏴󠁭󠁴󠀱󠀰󠁿 Flag for Fontana (MT-10) 🏴󠁭󠁴󠀳󠀴󠁿 Flag for Msida (MT-34) 
🏴󠁭󠁴󠀳󠀷󠁿 Flag for Nadur (MT-37) 🏴󠁭󠁴󠀳󠀲󠁿 Flag for Mosta (MT-32) 🏴󠁭󠁴󠀳󠀵󠁿 Flag for Imtarfa (MT-35) 🏴󠁭󠁴󠀰󠀶󠁿 Flag for Cospicua (MT-06) 🏴󠁭󠁴󠀰󠀳󠁿 Flag for Birgu (MT-03) 🏴󠁭󠁲󠀱󠀴󠁿 Flag for Nouakchott Nord (MR-14) 🏴󠁭󠁴󠀱󠀲󠁿 Flag for Gżira (MT-12) 🏴󠁭󠁴󠀳󠀰󠁿 Flag for Mellieħa (MT-30) 🏴󠁭󠁴󠀱󠀷󠁿 Flag for Għaxaq (MT-17) 🏴󠁭󠁴󠀱󠀸󠁿 Flag for Ħamrun (MT-18) 🏴󠁭󠁴󠀰󠀸󠁿 Flag for Fgura (MT-08) 🏴󠁭󠁴󠀰󠀱󠁿 Flag for Attard (MT-01) 🏴󠁭󠁴󠀱󠀵󠁿 Flag for Għargħur (MT-15) 🏴󠁭󠁴󠀲󠀱󠁿 Flag for Kalkara (MT-21) 🏴󠁭󠁲󠀱󠀵󠁿 Flag for Nouakchott Sud (MR-15) 🏴󠁭󠁴󠀲󠀸󠁿 Flag for Marsaxlokk (MT-28) 🏴󠁭󠁴󠀴󠀵󠁿 Flag for Victoria (MT-45) 🏴󠁭󠁴󠀴󠀲󠁿 Flag for Qala (MT-42) 🏴󠁭󠁴󠀶󠀴󠁿 Flag for Żabbar (MT-64) 🏴󠁭󠁵󠁡󠁧󠁿 Flag for Agaléga (MU-AG) 🏴󠁭󠁴󠀵󠀸󠁿 Flag for Ta’ Xbiex (MT-58) 🏴󠁭󠁴󠀴󠀱󠁿 Flag for Pietà (MT-41) 🏴󠁭󠁴󠀵󠀲󠁿 Flag for Sannat (MT-52) 🏴󠁭󠁵󠁰󠁬󠁿 Flag for Port Louis District (MU-PL) 🏴󠁭󠁴󠀶󠀱󠁿 Flag for Xagħra (MT-61) 🏴󠁭󠁵󠁢󠁬󠁿 Flag for Rivière Noire (MU-BL) 🏴󠁭󠁴󠀵󠀶󠁿 Flag for Sliema (MT-56) 🏴󠁭󠁴󠀴󠀷󠁿 Flag for Safi (MT-47) 🏴󠁭󠁵󠁦󠁬󠁿 Flag for Flacq (MU-FL) 🏴󠁭󠁴󠀴󠀰󠁿 Flag for Pembroke (MT-40) 🏴󠁭󠁴󠀵󠀷󠁿 Flag for Swieqi (MT-57) 🏴󠁭󠁵󠁣󠁵󠁿 Flag for Curepipe (MU-CU) 🏴󠁭󠁴󠀶󠀸󠁿 Flag for Żurrieq (MT-68) 🏴󠁭󠁴󠀴󠀹󠁿 Flag for San Ġwann (MT-49) 🏴󠁭󠁵󠁧󠁰󠁿 Flag for Grand Port (MU-GP) 🏴󠁭󠁵󠁣󠁣󠁿 Flag for Cargados Carajos (MU-CC) 🏴󠁭󠁴󠀴󠀴󠁿 Flag for Qrendi (MT-44) 🏴󠁭󠁴󠀶󠀰󠁿 Flag for Valletta (MT-60) 🏴󠁭󠁵󠁰󠁡󠁿 Flag for Pamplemousses (MU-PA) 🏴󠁭󠁴󠀴󠀳󠁿 Flag for Qormi (MT-43) 🏴󠁭󠁵󠁰󠁵󠁿 Flag for Port Louis (MU-PU) 🏴󠁭󠁴󠀵󠀹󠁿 Flag for Tarxien (MT-59) 🏴󠁭󠁴󠀶󠀵󠁿 Flag for Żebbuġ Gozo (MT-65) 🏴󠁭󠁴󠀵󠀰󠁿 Flag for Saint Lawrence (MT-50) 🏴󠁭󠁴󠀶󠀷󠁿 Flag for Żejtun (MT-67) 🏴󠁭󠁴󠀵󠀱󠁿 Flag for St. 
Paul’s Bay (MT-51) 🏴󠁭󠁴󠀵󠀳󠁿 Flag for Santa Luċija (MT-53) 🏴󠁭󠁴󠀶󠀶󠁿 Flag for Żebbuġ (MT-66) 🏴󠁭󠁴󠀴󠀶󠁿 Flag for Rabat (MT-46) 🏴󠁭󠁴󠀵󠀵󠁿 Flag for Siġġiewi (MT-55) 👩🏽‍👩🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁭󠁴󠀵󠀴󠁿 Flag for Santa Venera (MT-54) 🏴󠁭󠁴󠀶󠀳󠁿 Flag for Xgħajra (MT-63) 🏴󠁭󠁵󠁭󠁯󠁿 Flag for Moka (MU-MO) 🏴󠁭󠁸󠁭󠁩󠁣󠁿 Flag for Michoacán (MX-MIC) 🏴󠁭󠁷󠁮󠁿 Flag for Northern (MW-N) 🏴󠁭󠁶󠁵󠁮󠁿 Flag for Upper North Province (MV-UN) 🏴󠁭󠁸󠁣󠁯󠁬󠁿 Flag for Colima (MX-COL) 🏴󠁭󠁵󠁲󠁯󠁿 Flag for Rodrigues (MU-RO) 🏴󠁭󠁸󠁧󠁵󠁡󠁿 Flag for Guanajuato (MX-GUA) 🏴󠁭󠁸󠁣󠁭󠁸󠁿 Flag for Ciudad de Mexico (MX-CMX) 🏴󠁭󠁸󠁰󠁵󠁥󠁿 Flag for Puebla (MX-PUE) 🏴󠁭󠁵󠁱󠁢󠁿 Flag for Quatre Bornes (MU-QB) 🏴󠁭󠁸󠁯󠁡󠁸󠁿 Flag for Oaxaca (MX-OAX) 🏴󠁭󠁷󠁣󠁿 Flag for Central (MW-C) 🏴󠁭󠁵󠁳󠁡󠁿 Flag for Savanne (MU-SA) 🏴󠁭󠁸󠁭󠁯󠁲󠁿 Flag for Morelos (MX-MOR) 🏴󠁭󠁸󠁨󠁩󠁤󠁿 Flag for Hidalgo (MX-HID) 🏴󠁭󠁸󠁡󠁧󠁵󠁿 Flag for Aguascalientes (MX-AGU) 🏴󠁭󠁸󠁣󠁡󠁭󠁿 Flag for Campeche (MX-CAM) 🏴󠁭󠁸󠁮󠁬󠁥󠁿 Flag for Nuevo León (MX-NLE) 🏴󠁭󠁶󠁭󠁬󠁥󠁿 Flag for Malé (MV-MLE) 🏴󠁭󠁸󠁧󠁲󠁯󠁿 Flag for Guerrero (MX-GRO) 🏴󠁭󠁵󠁶󠁰󠁿 Flag for Vacoas-Phoenix (MU-VP) 👨🏻‍👨🏻‍👦🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁭󠁶󠁮󠁣󠁿 Flag for North Central Province (MV-NC) 🏴󠁭󠁸󠁭󠁥󠁸󠁿 Flag for Mexico State (MX-MEX) 🏴󠁭󠁵󠁰󠁷󠁿 Flag for Plaines Wilhems (MU-PW) 🏴󠁭󠁶󠁣󠁥󠁿 Flag for Central Province (MV-CE) 🏴󠁭󠁸󠁣󠁯󠁡󠁿 Flag for Coahuila (MX-COA) 🏴󠁭󠁶󠁳󠁵󠁿 Flag for South Province (MV-SU) 🏴󠁭󠁸󠁣󠁨󠁰󠁿 Flag for Chiapas (MX-CHP) 🏴󠁭󠁷󠁳󠁿 Flag for Southern (MW-S) 🏴󠁭󠁺󠁳󠁿 Flag for Sofala (MZ-S) 🏴󠁭󠁹󠀰󠀹󠁿 Flag for Perlis (MY-09) 🏴󠁭󠁸󠁶󠁥󠁲󠁿 Flag for Veracruz (MX-VER) 🏴󠁭󠁹󠀱󠀳󠁿 Flag for Sarawak (MY-13) 🏴󠁭󠁹󠀰󠀳󠁿 Flag for Kelantan (MY-03) 🏴󠁮󠁡󠁣󠁡󠁿 Flag for Zambezi (NA-CA) 🏴󠁭󠁺󠁢󠁿 Flag for Manica (MZ-B) 🏴󠁭󠁹󠀱󠀵󠁿 Flag for Labuan (MY-15) 🏴󠁭󠁺󠁰󠁿 Flag for Cabo Delgado (MZ-P) 🏴󠁮󠁡󠁨󠁡󠁿 Flag for Hardap (NA-HA) 🏴󠁭󠁺󠁴󠁿 Flag for Tete (MZ-T) 🏴󠁭󠁹󠀰󠀲󠁿 Flag for Kedah (MY-02) 🏴󠁭󠁹󠀰󠀶󠁿 Flag for Pahang (MY-06) 🏴󠁭󠁹󠀰󠀷󠁿 Flag for Penang (MY-07) 🏴󠁭󠁹󠀰󠀸󠁿 Flag for Perak (MY-08) 🏴󠁭󠁺󠁬󠁿 Flag for Maputo Province (MZ-L) 🏴󠁢󠁲󠁧󠁯󠁿 Flag for Goiás (BR-GO) 
🏴󠁭󠁹󠀱󠀱󠁿 Flag for Terengganu (MY-11) 🏴󠁭󠁺󠁩󠁿 Flag for Inhambane (MZ-I) 🏴󠁭󠁹󠀰󠀴󠁿 Flag for Malacca (MY-04) 🏴󠁮󠁡󠁥󠁲󠁿 Flag for Erongo (NA-ER) 🏴󠁭󠁸󠁴󠁬󠁡󠁿 Flag for Tlaxcala (MX-TLA) 🏴󠁭󠁹󠀰󠀵󠁿 Flag for Negeri Sembilan (MY-05) 🏴󠁭󠁸󠁺󠁡󠁣󠁿 Flag for Zacatecas (MX-ZAC) 🏴󠁭󠁸󠁴󠁡󠁭󠁿 Flag for Tamaulipas (MX-TAM) 🏴󠁭󠁺󠁡󠁿 Flag for Niassa (MZ-A) 🏴󠁭󠁺󠁭󠁰󠁭󠁿 Flag for Maputo (MZ-MPM) 🏴󠁭󠁺󠁮󠁿 Flag for Nampula (MZ-N) 🏴󠁭󠁹󠀱󠀶󠁿 Flag for Putrajaya (MY-16) 🏴󠁭󠁸󠁳󠁩󠁮󠁿 Flag for Sinaloa (MX-SIN) 🏴󠁭󠁸󠁹󠁵󠁣󠁿 Flag for Yucatán (MX-YUC) 🏴󠁭󠁹󠀱󠀲󠁿 Flag for Sabah (MY-12) 👩🏼‍👩🏼‍👧🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁭󠁺󠁱󠁿 Flag for Zambezia (MZ-Q) 🏴󠁭󠁸󠁱󠁵󠁥󠁿 Flag for Querétaro (MX-QUE) 🏴󠁭󠁺󠁧󠁿 Flag for Gaza (MZ-G) 🏴󠁮󠁡󠁯󠁤󠁿 Flag for Otjozondjupa (NA-OD) 🏴󠁮󠁥󠀴󠁿 Flag for Maradi (NE-4) 🏴󠁮󠁡󠁫󠁵󠁿 Flag for Kunene (NA-KU) 🏴󠁮󠁧󠁡󠁫󠁿 Flag for Akwa Ibom (NG-AK) 🏴󠁮󠁥󠀵󠁿 Flag for Tahoua (NE-5) 🏴󠁭󠁵󠁲󠁲󠁿 Flag for Rivière du Rempart (MU-RR) 🏴󠁮󠁧󠁩󠁭󠁿 Flag for Imo (NG-IM) 🏴󠁮󠁧󠁫󠁴󠁿 Flag for Katsina (NG-KT) 🏴󠁮󠁥󠀳󠁿 Flag for Dosso (NE-3) 🏴󠁮󠁥󠀶󠁿 Flag for Tillabéri (NE-6) 🏴󠁮󠁧󠁥󠁫󠁿 Flag for Ekiti (NG-EK) 🏴󠁮󠁡󠁯󠁨󠁿 Flag for Omaheke (NA-OH) 🏴󠁮󠁧󠁢󠁡󠁿 Flag for Bauchi (NG-BA) 🏴󠁮󠁡󠁫󠁡󠁿 Flag for Karas (NA-KA) 🏴󠁮󠁧󠁢󠁹󠁿 Flag for Bayelsa (NG-BY) 🏴󠁮󠁡󠁯󠁷󠁿 Flag for Ohangwena (NA-OW) 🏴󠁮󠁧󠁢󠁥󠁿 Flag for Benue (NG-BE) 🏴󠁮󠁧󠁥󠁮󠁿 Flag for Enugu (NG-EN) 🏴󠁮󠁡󠁯󠁮󠁿 Flag for Oshana (NA-ON) 🏴󠁮󠁧󠁫󠁤󠁿 Flag for Kaduna (NG-KD) 👨🏻‍👶🏻‍👦🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 🏴󠁮󠁧󠁫󠁥󠁿 Flag for Kebbi (NG-KE) 🏴󠁮󠁧󠁪󠁩󠁿 Flag for Jigawa (NG-JI) 🏴󠁮󠁥󠀸󠁿 Flag for Niamey (NE-8) 🏴󠁮󠁧󠁡󠁮󠁿 Flag for Anambra (NG-AN) 🏴󠁮󠁧󠁧󠁯󠁿 Flag for Gombe (NG-GO) 🏴󠁮󠁥󠀱󠁿 Flag for Agadez (NE-1) 🏴󠁮󠁡󠁫󠁨󠁿 Flag for Khomas (NA-KH) 🏴󠁮󠁥󠀲󠁿 Flag for Diffa (NE-2) 🏴󠁭󠁹󠀰󠀱󠁿 Flag for Johor (MY-01) 🏴󠁮󠁧󠁫󠁮󠁿 Flag for Kano (NG-KN) 🏴󠁮󠁡󠁯󠁳󠁿 Flag for Omusati (NA-OS) 🏴󠁮󠁧󠁫󠁯󠁿 Flag for Kogi (NG-KO) 🏴󠁮󠁧󠁥󠁤󠁿 Flag for Edo (NG-ED) 🏴󠁮󠁧󠁡󠁢󠁿 Flag for Abia (NG-AB) 🏴󠁮󠁡󠁯󠁴󠁿 Flag for Oshikoto (NA-OT) 🏴󠁮󠁡󠁫󠁷󠁿 Flag for Kavango West (NA-KW) 🏴󠁮󠁧󠁥󠁢󠁿 Flag for Ebonyi (NG-EB) 🏴󠁮󠁥󠀷󠁿 Flag for Zinder 
(NE-7) 🏴󠁮󠁩󠁪󠁩󠁿 Flag for Jinotega (NI-JI) 🏴󠁮󠁧󠁮󠁡󠁿 Flag for Nasarawa (NG-NA) 🏴󠁮󠁬󠁦󠁲󠁿 Flag for Friesland (NL-FR) 🏴󠁮󠁧󠁳󠁯󠁿 Flag for Sokoto (NG-SO) 🏴󠁮󠁩󠁲󠁩󠁿 Flag for Rivas (NI-RI) 🏴󠁮󠁩󠁮󠁳󠁿 Flag for Nueva Segovia (NI-NS) 🏴󠁮󠁧󠁰󠁬󠁿 Flag for Plateau (NG-PL) 🏴󠁮󠁧󠁹󠁯󠁿 Flag for Yobe (NG-YO) 🏴󠁮󠁬󠁢󠁱󠀱󠁿 Flag for Bonaire (NL-BQ1) 🏴󠁮󠁩󠁡󠁮󠁿 Flag for Atlántico Norte (NI-AN) 🏴󠁮󠁧󠁺󠁡󠁿 Flag for Zamfara (NG-ZA) 🏴󠁮󠁬󠁧󠁥󠁿 Flag for Gelderland (NL-GE) 🏴󠁮󠁧󠁯󠁹󠁿 Flag for Oyo (NG-OY) 🏴󠁮󠁩󠁭󠁤󠁿 Flag for Madriz (NI-MD) 🏴󠁮󠁩󠁣󠁩󠁿 Flag for Chinandega (NI-CI) 🏴󠁮󠁧󠁯󠁮󠁿 Flag for Ondo (NG-ON) 👨🏽‍👨🏽‍👦🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁤󠁥󠁮󠁷󠁿 Flag for North Rhine-Westphalia (DE-NW) 🏴󠁮󠁧󠁬󠁡󠁿 Flag for Lagos (NG-LA) 🏴󠁮󠁩󠁭󠁮󠁿 Flag for Managua (NI-MN) 🏴󠁮󠁩󠁡󠁳󠁿 Flag for Atlántico Sur (NI-AS) 🏴󠁮󠁬󠁣󠁷󠁿 Flag for Curaçao (NL-CW) 🏴󠁮󠁩󠁢󠁯󠁿 Flag for Boaco (NI-BO) 🏴󠁮󠁧󠁲󠁩󠁿 Flag for Rivers (NG-RI) 🏴󠁮󠁩󠁧󠁲󠁿 Flag for Granada (NI-GR) 🏴󠁮󠁩󠁣󠁯󠁿 Flag for Chontales (NI-CO) 🏴󠁮󠁬󠁧󠁲󠁿 Flag for Groningen (NL-GR) 🏴󠁮󠁬󠁢󠁱󠀳󠁿 Flag for Sint Eustatius (NL-BQ3) 🏴󠁮󠁩󠁳󠁪󠁿 Flag for Río San Juan (NI-SJ) 🏴󠁮󠁧󠁯󠁳󠁿 Flag for Osun (NG-OS) 🏴󠁮󠁧󠁴󠁡󠁿 Flag for Taraba (NG-TA) 🏴󠁮󠁬󠁦󠁬󠁿 Flag for Flevoland (NL-FL) 🏴󠁮󠁩󠁭󠁴󠁿 Flag for Matagalpa (NI-MT) 🏴󠁮󠁬󠁤󠁲󠁿 Flag for Drenthe (NL-DR) 🏴󠁮󠁩󠁣󠁡󠁿 Flag for Carazo (NI-CA) 🏴󠁮󠁧󠁫󠁷󠁿 Flag for Kwara (NG-KW) 🏴󠁮󠁧󠁮󠁩󠁿 Flag for Niger (NG-NI) 🏴󠁮󠁩󠁥󠁳󠁿 Flag for Estelí (NI-ES) 🏴󠁮󠁬󠁺󠁨󠁿 Flag for South Holland (NL-ZH) """ for line in emojis.splitlines(): words = line.split() char = words[0] desc = " ".join(words[1:]) print("{}\t:{}".format(desc, char))
en
0.437167
#!/usr/bin/python3 # -*- coding: utf-8 -*- ⛑🏻 Helmet With White Cross, Type-1-2 ⛑🏼 Helmet With White Cross, Type-3 ⛑🏽 Helmet With White Cross, Type-4 ⛑🏾 Helmet With White Cross, Type-5 ⛑🏿 Helmet With White Cross, Type-6 💏🏻 Kiss, Type-1-2 💏🏼 Kiss, Type-3 💏🏽 Kiss, Type-4 💏🏾 Kiss, Type-5 💏🏿 Kiss, Type-6 💑🏻 Couple With Heart, Type-1-2 💑🏼 Couple With Heart, Type-3 💑🏽 Couple With Heart, Type-4 💑🏾 Couple With Heart, Type-5 💑🏿 Couple With Heart, Type-6 ⛷🏻 Skier, Type-1-2 ⛷🏼 Skier, Type-3 ⛷🏽 Skier, Type-4 ⛷🏾 Skier, Type-5 ⛷🏿 Skier, Type-6 😀 Grinning Face 😁 Grinning Face With Smiling Eyes 😂 Face With Tears of Joy 🤣 Rolling on the Floor Laughing 😃 Smiling Face With Open Mouth 😄 Smiling Face With Open Mouth & Smiling Eyes 😅 Smiling Face With Open Mouth & Cold Sweat 😆 Smiling Face With Open Mouth & Closed Eyes 😉 Winking Face 😊 Smiling Face With Smiling Eyes 😋 Face Savouring Delicious Food 😎 Smiling Face With Sunglasses 😍 Smiling Face With Heart-Eyes 😘 Face Blowing a Kiss 😗 Kissing Face 😙 Kissing Face With Smiling Eyes 😚 Kissing Face With Closed Eyes ☺ Smiling Face 🙂 Slightly Smiling Face 🤗 Hugging Face 🤩 Star-Struck 🤔 Thinking Face 🤨 Face With Raised Eyebrow 😐 Neutral Face 😑 Expressionless Face 😶 Face Without Mouth 🙄 Face With Rolling Eyes 😏 Smirking Face 😣 Persevering Face 😥 Disappointed but Relieved Face 😮 Face With Open Mouth 🤐 Zipper-Mouth Face 😯 Hushed Face 😪 Sleepy Face 😫 Tired Face 😴 Sleeping Face 😌 Relieved Face 😛 Face With Stuck-Out Tongue 😜 Face With Stuck-Out Tongue & Winking Eye 😝 Face With Stuck-Out Tongue & Closed Eyes 🤤 Drooling Face 😒 Unamused Face 😓 Face With Cold Sweat 😔 Pensive Face 😕 Confused Face 🙃 Upside-Down Face 🤑 Money-Mouth Face 😲 Astonished Face ☹ Frowning Face 🙁 Slightly Frowning Face 😖 Confounded Face 😞 Disappointed Face 😟 Worried Face 😤 Face With Steam From Nose 😢 Crying Face 😭 Loudly Crying Face 😦 Frowning Face With Open Mouth 😧 Anguished Face 😨 Fearful Face 😩 Weary Face 🤯 Exploding Head 😬 Grimacing Face 😰 Face With Open Mouth & Cold Sweat 😱 Face 
Screaming in Fear 😳 Flushed Face 🤪 Crazy Face 😵 Dizzy Face 😡 Pouting Face 😠 Angry Face 🤬 Face With Symbols Over Mouth 😷 Face With Medical Mask 🤒 Face With Thermometer 🤕 Face With Head-Bandage 🤢 Nauseated Face 🤮 Face Vomiting 🤧 Sneezing Face 😇 Smiling Face With Halo 🤠 Cowboy Hat Face 🤡 Clown Face 🤥 Lying Face 🤫 Shushing Face 🤭 Face With Hand Over Mouth 🧐 Face With Monocle 🤓 Nerd Face 😈 Smiling Face With Horns 👿 Angry Face With Horns 👹 Ogre 👺 Goblin 💀 Skull ☠ Skull and Crossbones 👻 Ghost 👽 Alien 👾 Alien Monster 🤖 Robot Face 💩 Pile of Poo 😺 Smiling Cat Face With Open Mouth 😸 Grinning Cat Face With Smiling Eyes 😹 Cat Face With Tears of Joy 😻 Smiling Cat Face With Heart-Eyes 😼 Cat Face With Wry Smile 😽 Kissing Cat Face With Closed Eyes 🙀 Weary Cat Face 😿 Crying Cat Face 😾 Pouting Cat Face 🙈 See-No-Evil Monkey 🙉 Hear-No-Evil Monkey 🙊 Speak-No-Evil Monkey 👶 Baby 👶🏻 Baby: Light Skin Tone 👶🏼 Baby: Medium-Light Skin Tone 👶🏽 Baby: Medium Skin Tone 👶🏾 Baby: Medium-Dark Skin Tone 👶🏿 Baby: Dark Skin Tone 🧒 Child 🧒🏻 Child: Light Skin Tone 🧒🏼 Child: Medium-Light Skin Tone 🧒🏽 Child: Medium Skin Tone 🧒🏾 Child: Medium-Dark Skin Tone 🧒🏿 Child: Dark Skin Tone 👦 Boy 👦🏻 Boy: Light Skin Tone 👦🏼 Boy: Medium-Light Skin Tone 👦🏽 Boy: Medium Skin Tone 👦🏾 Boy: Medium-Dark Skin Tone 👦🏿 Boy: Dark Skin Tone 👧 Girl 👧🏻 Girl: Light Skin Tone 👧🏼 Girl: Medium-Light Skin Tone 👧🏽 Girl: Medium Skin Tone 👧🏾 Girl: Medium-Dark Skin Tone 👧🏿 Girl: Dark Skin Tone 🧑 Adult 🧑🏻 Adult: Light Skin Tone 🧑🏼 Adult: Medium-Light Skin Tone 🧑🏽 Adult: Medium Skin Tone 🧑🏾 Adult: Medium-Dark Skin Tone 🧑🏿 Adult: Dark Skin Tone 👨 Man 👨🏻 Man: Light Skin Tone 👨🏼 Man: Medium-Light Skin Tone 👨🏽 Man: Medium Skin Tone 👨🏾 Man: Medium-Dark Skin Tone 👨🏿 Man: Dark Skin Tone 👩 Woman 👩🏻 Woman: Light Skin Tone 👩🏼 Woman: Medium-Light Skin Tone 👩🏽 Woman: Medium Skin Tone 👩🏾 Woman: Medium-Dark Skin Tone 👩🏿 Woman: Dark Skin Tone 🧓 Older Adult 🧓🏻 Older Adult: Light Skin Tone 🧓🏼 Older Adult: Medium-Light Skin Tone 🧓🏽 Older Adult: Medium Skin Tone 
🧓🏾 Older Adult: Medium-Dark Skin Tone 🧓🏿 Older Adult: Dark Skin Tone 👴 Old Man 👴🏻 Old Man: Light Skin Tone 👴🏼 Old Man: Medium-Light Skin Tone 👴🏽 Old Man: Medium Skin Tone 👴🏾 Old Man: Medium-Dark Skin Tone 👴🏿 Old Man: Dark Skin Tone 👵 Old Woman 👵🏻 Old Woman: Light Skin Tone 👵🏼 Old Woman: Medium-Light Skin Tone 👵🏽 Old Woman: Medium Skin Tone 👵🏾 Old Woman: Medium-Dark Skin Tone 👵🏿 Old Woman: Dark Skin Tone 👨‍⚕️ Man Health Worker 👨🏻‍⚕️ Man Health Worker: Light Skin Tone 👨🏼‍⚕️ Man Health Worker: Medium-Light Skin Tone 👨🏽‍⚕️ Man Health Worker: Medium Skin Tone 👨🏾‍⚕️ Man Health Worker: Medium-Dark Skin Tone 👨🏿‍⚕️ Man Health Worker: Dark Skin Tone 👩‍⚕️ Woman Health Worker 👩🏻‍⚕️ Woman Health Worker: Light Skin Tone 👩🏼‍⚕️ Woman Health Worker: Medium-Light Skin Tone 👩🏽‍⚕️ Woman Health Worker: Medium Skin Tone 👩🏾‍⚕️ Woman Health Worker: Medium-Dark Skin Tone 👩🏿‍⚕️ Woman Health Worker: Dark Skin Tone 👨‍🎓 Man Student 👨🏻‍🎓 Man Student: Light Skin Tone 👨🏼‍🎓 Man Student: Medium-Light Skin Tone 👨🏽‍🎓 Man Student: Medium Skin Tone 👨🏾‍🎓 Man Student: Medium-Dark Skin Tone 👨🏿‍🎓 Man Student: Dark Skin Tone 👩‍🎓 Woman Student 👩🏻‍🎓 Woman Student: Light Skin Tone 👩🏼‍🎓 Woman Student: Medium-Light Skin Tone 👩🏽‍🎓 Woman Student: Medium Skin Tone 👩🏾‍🎓 Woman Student: Medium-Dark Skin Tone 👩🏿‍🎓 Woman Student: Dark Skin Tone 👨‍🏫 Man Teacher 👨🏻‍🏫 Man Teacher: Light Skin Tone 👨🏼‍🏫 Man Teacher: Medium-Light Skin Tone 👨🏽‍🏫 Man Teacher: Medium Skin Tone 👨🏾‍🏫 Man Teacher: Medium-Dark Skin Tone 👨🏿‍🏫 Man Teacher: Dark Skin Tone 👩‍🏫 Woman Teacher 👩🏻‍🏫 Woman Teacher: Light Skin Tone 👩🏼‍🏫 Woman Teacher: Medium-Light Skin Tone 👩🏽‍🏫 Woman Teacher: Medium Skin Tone 👩🏾‍🏫 Woman Teacher: Medium-Dark Skin Tone 👩🏿‍🏫 Woman Teacher: Dark Skin Tone 👨‍⚖️ Man Judge 👨🏻‍⚖️ Man Judge: Light Skin Tone 👨🏼‍⚖️ Man Judge: Medium-Light Skin Tone 👨🏽‍⚖️ Man Judge: Medium Skin Tone 👨🏾‍⚖️ Man Judge: Medium-Dark Skin Tone 👨🏿‍⚖️ Man Judge: Dark Skin Tone 👩‍⚖️ Woman Judge 👩🏻‍⚖️ Woman Judge: Light Skin Tone 👩🏼‍⚖️ Woman Judge: Medium-Light 
Skin Tone 👩🏽‍⚖️ Woman Judge: Medium Skin Tone 👩🏾‍⚖️ Woman Judge: Medium-Dark Skin Tone 👩🏿‍⚖️ Woman Judge: Dark Skin Tone 👨‍🌾 Man Farmer 👨🏻‍🌾 Man Farmer: Light Skin Tone 👨🏼‍🌾 Man Farmer: Medium-Light Skin Tone 👨🏽‍🌾 Man Farmer: Medium Skin Tone 👨🏾‍🌾 Man Farmer: Medium-Dark Skin Tone 👨🏿‍🌾 Man Farmer: Dark Skin Tone 👩‍🌾 Woman Farmer 👩🏻‍🌾 Woman Farmer: Light Skin Tone 👩🏼‍🌾 Woman Farmer: Medium-Light Skin Tone 👩🏽‍🌾 Woman Farmer: Medium Skin Tone 👩🏾‍🌾 Woman Farmer: Medium-Dark Skin Tone 👩🏿‍🌾 Woman Farmer: Dark Skin Tone 👨‍🍳 Man Cook 👨🏻‍🍳 Man Cook: Light Skin Tone 👨🏼‍🍳 Man Cook: Medium-Light Skin Tone 👨🏽‍🍳 Man Cook: Medium Skin Tone 👨🏾‍🍳 Man Cook: Medium-Dark Skin Tone 👨🏿‍🍳 Man Cook: Dark Skin Tone 👩‍🍳 Woman Cook 👩🏻‍🍳 Woman Cook: Light Skin Tone 👩🏼‍🍳 Woman Cook: Medium-Light Skin Tone 👩🏽‍🍳 Woman Cook: Medium Skin Tone 👩🏾‍🍳 Woman Cook: Medium-Dark Skin Tone 👩🏿‍🍳 Woman Cook: Dark Skin Tone 👨‍🔧 Man Mechanic 👨🏻‍🔧 Man Mechanic: Light Skin Tone 👨🏼‍🔧 Man Mechanic: Medium-Light Skin Tone 👨🏽‍🔧 Man Mechanic: Medium Skin Tone 👨🏾‍🔧 Man Mechanic: Medium-Dark Skin Tone 👨🏿‍🔧 Man Mechanic: Dark Skin Tone 👩‍🔧 Woman Mechanic 👩🏻‍🔧 Woman Mechanic: Light Skin Tone 👩🏼‍🔧 Woman Mechanic: Medium-Light Skin Tone 👩🏽‍🔧 Woman Mechanic: Medium Skin Tone 👩🏾‍🔧 Woman Mechanic: Medium-Dark Skin Tone 👩🏿‍🔧 Woman Mechanic: Dark Skin Tone 👨‍🏭 Man Factory Worker 👨🏻‍🏭 Man Factory Worker: Light Skin Tone 👨🏼‍🏭 Man Factory Worker: Medium-Light Skin Tone 👨🏽‍🏭 Man Factory Worker: Medium Skin Tone 👨🏾‍🏭 Man Factory Worker: Medium-Dark Skin Tone 👨🏿‍🏭 Man Factory Worker: Dark Skin Tone 👩‍🏭 Woman Factory Worker 👩🏻‍🏭 Woman Factory Worker: Light Skin Tone 👩🏼‍🏭 Woman Factory Worker: Medium-Light Skin Tone 👩🏽‍🏭 Woman Factory Worker: Medium Skin Tone 👩🏾‍🏭 Woman Factory Worker: Medium-Dark Skin Tone 👩🏿‍🏭 Woman Factory Worker: Dark Skin Tone 👨‍💼 Man Office Worker 👨🏻‍💼 Man Office Worker: Light Skin Tone 👨🏼‍💼 Man Office Worker: Medium-Light Skin Tone 👨🏽‍💼 Man Office Worker: Medium Skin Tone 👨🏾‍💼 Man Office Worker: Medium-Dark Skin 
Tone 👨🏿‍💼 Man Office Worker: Dark Skin Tone 👩‍💼 Woman Office Worker 👩🏻‍💼 Woman Office Worker: Light Skin Tone 👩🏼‍💼 Woman Office Worker: Medium-Light Skin Tone 👩🏽‍💼 Woman Office Worker: Medium Skin Tone 👩🏾‍💼 Woman Office Worker: Medium-Dark Skin Tone 👩🏿‍💼 Woman Office Worker: Dark Skin Tone 👨‍🔬 Man Scientist 👨🏻‍🔬 Man Scientist: Light Skin Tone 👨🏼‍🔬 Man Scientist: Medium-Light Skin Tone 👨🏽‍🔬 Man Scientist: Medium Skin Tone 👨🏾‍🔬 Man Scientist: Medium-Dark Skin Tone 👨🏿‍🔬 Man Scientist: Dark Skin Tone 👩‍🔬 Woman Scientist 👩🏻‍🔬 Woman Scientist: Light Skin Tone 👩🏼‍🔬 Woman Scientist: Medium-Light Skin Tone 👩🏽‍🔬 Woman Scientist: Medium Skin Tone 👩🏾‍🔬 Woman Scientist: Medium-Dark Skin Tone 👩🏿‍🔬 Woman Scientist: Dark Skin Tone 👨‍💻 Man Technologist 👨🏻‍💻 Man Technologist: Light Skin Tone 👨🏼‍💻 Man Technologist: Medium-Light Skin Tone 👨🏽‍💻 Man Technologist: Medium Skin Tone 👨🏾‍💻 Man Technologist: Medium-Dark Skin Tone 👨🏿‍💻 Man Technologist: Dark Skin Tone 👩‍💻 Woman Technologist 👩🏻‍💻 Woman Technologist: Light Skin Tone 👩🏼‍💻 Woman Technologist: Medium-Light Skin Tone 👩🏽‍💻 Woman Technologist: Medium Skin Tone 👩🏾‍💻 Woman Technologist: Medium-Dark Skin Tone 👩🏿‍💻 Woman Technologist: Dark Skin Tone 👨‍🎤 Man Singer 👨🏻‍🎤 Man Singer: Light Skin Tone 👨🏼‍🎤 Man Singer: Medium-Light Skin Tone 👨🏽‍🎤 Man Singer: Medium Skin Tone 👨🏾‍🎤 Man Singer: Medium-Dark Skin Tone 👨🏿‍🎤 Man Singer: Dark Skin Tone 👩‍🎤 Woman Singer 👩🏻‍🎤 Woman Singer: Light Skin Tone 👩🏼‍🎤 Woman Singer: Medium-Light Skin Tone 👩🏽‍🎤 Woman Singer: Medium Skin Tone 👩🏾‍🎤 Woman Singer: Medium-Dark Skin Tone 👩🏿‍🎤 Woman Singer: Dark Skin Tone 👨‍🎨 Man Artist 👨🏻‍🎨 Man Artist: Light Skin Tone 👨🏼‍🎨 Man Artist: Medium-Light Skin Tone 👨🏽‍🎨 Man Artist: Medium Skin Tone 👨🏾‍🎨 Man Artist: Medium-Dark Skin Tone 👨🏿‍🎨 Man Artist: Dark Skin Tone 👩‍🎨 Woman Artist 👩🏻‍🎨 Woman Artist: Light Skin Tone 👩🏼‍🎨 Woman Artist: Medium-Light Skin Tone 👩🏽‍🎨 Woman Artist: Medium Skin Tone 👩🏾‍🎨 Woman Artist: Medium-Dark Skin Tone 👩🏿‍🎨 Woman Artist: Dark Skin Tone 👨‍✈️ Man 
Pilot 👨🏻‍✈️ Man Pilot: Light Skin Tone 👨🏼‍✈️ Man Pilot: Medium-Light Skin Tone 👨🏽‍✈️ Man Pilot: Medium Skin Tone 👨🏾‍✈️ Man Pilot: Medium-Dark Skin Tone 👨🏿‍✈️ Man Pilot: Dark Skin Tone 👩‍✈️ Woman Pilot 👩🏻‍✈️ Woman Pilot: Light Skin Tone 👩🏼‍✈️ Woman Pilot: Medium-Light Skin Tone 👩🏽‍✈️ Woman Pilot: Medium Skin Tone 👩🏾‍✈️ Woman Pilot: Medium-Dark Skin Tone 👩🏿‍✈️ Woman Pilot: Dark Skin Tone 👨‍🚀 Man Astronaut 👨🏻‍🚀 Man Astronaut: Light Skin Tone 👨🏼‍🚀 Man Astronaut: Medium-Light Skin Tone 👨🏽‍🚀 Man Astronaut: Medium Skin Tone 👨🏾‍🚀 Man Astronaut: Medium-Dark Skin Tone 👨🏿‍🚀 Man Astronaut: Dark Skin Tone 👩‍🚀 Woman Astronaut 👩🏻‍🚀 Woman Astronaut: Light Skin Tone 👩🏼‍🚀 Woman Astronaut: Medium-Light Skin Tone 👩🏽‍🚀 Woman Astronaut: Medium Skin Tone 👩🏾‍🚀 Woman Astronaut: Medium-Dark Skin Tone 👩🏿‍🚀 Woman Astronaut: Dark Skin Tone 👨‍🚒 Man Firefighter 👨🏻‍🚒 Man Firefighter: Light Skin Tone 👨🏼‍🚒 Man Firefighter: Medium-Light Skin Tone 👨🏽‍🚒 Man Firefighter: Medium Skin Tone 👨🏾‍🚒 Man Firefighter: Medium-Dark Skin Tone 👨🏿‍🚒 Man Firefighter: Dark Skin Tone 👩‍🚒 Woman Firefighter 👩🏻‍🚒 Woman Firefighter: Light Skin Tone 👩🏼‍🚒 Woman Firefighter: Medium-Light Skin Tone 👩🏽‍🚒 Woman Firefighter: Medium Skin Tone 👩🏾‍🚒 Woman Firefighter: Medium-Dark Skin Tone 👩🏿‍🚒 Woman Firefighter: Dark Skin Tone 👮 Police Officer 👮🏻 Police Officer: Light Skin Tone 👮🏼 Police Officer: Medium-Light Skin Tone 👮🏽 Police Officer: Medium Skin Tone 👮🏾 Police Officer: Medium-Dark Skin Tone 👮🏿 Police Officer: Dark Skin Tone 👮‍♂️ Man Police Officer 👮🏻‍♂️ Man Police Officer: Light Skin Tone 👮🏼‍♂️ Man Police Officer: Medium-Light Skin Tone 👮🏽‍♂️ Man Police Officer: Medium Skin Tone 👮🏾‍♂️ Man Police Officer: Medium-Dark Skin Tone 👮🏿‍♂️ Man Police Officer: Dark Skin Tone 👮‍♀️ Woman Police Officer 👮🏻‍♀️ Woman Police Officer: Light Skin Tone 👮🏼‍♀️ Woman Police Officer: Medium-Light Skin Tone 👮🏽‍♀️ Woman Police Officer: Medium Skin Tone 👮🏾‍♀️ Woman Police Officer: Medium-Dark Skin Tone 👮🏿‍♀️ Woman Police Officer: Dark Skin Tone 🕵 
Detective 🕵🏻 Detective: Light Skin Tone 🕵🏼 Detective: Medium-Light Skin Tone 🕵🏽 Detective: Medium Skin Tone 🕵🏾 Detective: Medium-Dark Skin Tone 🕵🏿 Detective: Dark Skin Tone 🕵️‍♂️ Man Detective 🕵🏻‍♂️ Man Detective: Light Skin Tone 🕵🏼‍♂️ Man Detective: Medium-Light Skin Tone 🕵🏽‍♂️ Man Detective: Medium Skin Tone 🕵🏾‍♂️ Man Detective: Medium-Dark Skin Tone 🕵🏿‍♂️ Man Detective: Dark Skin Tone 🕵️‍♀️ Woman Detective 🕵🏻‍♀️ Woman Detective: Light Skin Tone 🕵🏼‍♀️ Woman Detective: Medium-Light Skin Tone 🕵🏽‍♀️ Woman Detective: Medium Skin Tone 🕵🏾‍♀️ Woman Detective: Medium-Dark Skin Tone 🕵🏿‍♀️ Woman Detective: Dark Skin Tone 💂 Guard 💂🏻 Guard: Light Skin Tone 💂🏼 Guard: Medium-Light Skin Tone 💂🏽 Guard: Medium Skin Tone 💂🏾 Guard: Medium-Dark Skin Tone 💂🏿 Guard: Dark Skin Tone 💂‍♂️ Man Guard 💂🏻‍♂️ Man Guard: Light Skin Tone 💂🏼‍♂️ Man Guard: Medium-Light Skin Tone 💂🏽‍♂️ Man Guard: Medium Skin Tone 💂🏾‍♂️ Man Guard: Medium-Dark Skin Tone 💂🏿‍♂️ Man Guard: Dark Skin Tone 💂‍♀️ Woman Guard 💂🏻‍♀️ Woman Guard: Light Skin Tone 💂🏼‍♀️ Woman Guard: Medium-Light Skin Tone 💂🏽‍♀️ Woman Guard: Medium Skin Tone 💂🏾‍♀️ Woman Guard: Medium-Dark Skin Tone 💂🏿‍♀️ Woman Guard: Dark Skin Tone 👷 Construction Worker 👷🏻 Construction Worker: Light Skin Tone 👷🏼 Construction Worker: Medium-Light Skin Tone 👷🏽 Construction Worker: Medium Skin Tone 👷🏾 Construction Worker: Medium-Dark Skin Tone 👷🏿 Construction Worker: Dark Skin Tone 👷‍♂️ Man Construction Worker 👷🏻‍♂️ Man Construction Worker: Light Skin Tone 👷🏼‍♂️ Man Construction Worker: Medium-Light Skin Tone 👷🏽‍♂️ Man Construction Worker: Medium Skin Tone 👷🏾‍♂️ Man Construction Worker: Medium-Dark Skin Tone 👷🏿‍♂️ Man Construction Worker: Dark Skin Tone 👷‍♀️ Woman Construction Worker 👷🏻‍♀️ Woman Construction Worker: Light Skin Tone 👷🏼‍♀️ Woman Construction Worker: Medium-Light Skin Tone 👷🏽‍♀️ Woman Construction Worker: Medium Skin Tone 👷🏾‍♀️ Woman Construction Worker: Medium-Dark Skin Tone 👷🏿‍♀️ Woman Construction Worker: Dark Skin Tone 🤴 Prince 🤴🏻 Prince: Light 
Skin Tone 🤴🏼 Prince: Medium-Light Skin Tone 🤴🏽 Prince: Medium Skin Tone 🤴🏾 Prince: Medium-Dark Skin Tone 🤴🏿 Prince: Dark Skin Tone 👸 Princess 👸🏻 Princess: Light Skin Tone 👸🏼 Princess: Medium-Light Skin Tone 👸🏽 Princess: Medium Skin Tone 👸🏾 Princess: Medium-Dark Skin Tone 👸🏿 Princess: Dark Skin Tone 👳 Person Wearing Turban 👳🏻 Person Wearing Turban: Light Skin Tone 👳🏼 Person Wearing Turban: Medium-Light Skin Tone 👳🏽 Person Wearing Turban: Medium Skin Tone 👳🏾 Person Wearing Turban: Medium-Dark Skin Tone 👳🏿 Person Wearing Turban: Dark Skin Tone 👳‍♂️ Man Wearing Turban 👳🏻‍♂️ Man Wearing Turban: Light Skin Tone 👳🏼‍♂️ Man Wearing Turban: Medium-Light Skin Tone 👳🏽‍♂️ Man Wearing Turban: Medium Skin Tone 👳🏾‍♂️ Man Wearing Turban: Medium-Dark Skin Tone 👳🏿‍♂️ Man Wearing Turban: Dark Skin Tone 👳‍♀️ Woman Wearing Turban 👳🏻‍♀️ Woman Wearing Turban: Light Skin Tone 👳🏼‍♀️ Woman Wearing Turban: Medium-Light Skin Tone 👳🏽‍♀️ Woman Wearing Turban: Medium Skin Tone 👳🏾‍♀️ Woman Wearing Turban: Medium-Dark Skin Tone 👳🏿‍♀️ Woman Wearing Turban: Dark Skin Tone 👲 Man With Chinese Cap 👲🏻 Man With Chinese Cap: Light Skin Tone 👲🏼 Man With Chinese Cap: Medium-Light Skin Tone 👲🏽 Man With Chinese Cap: Medium Skin Tone 👲🏾 Man With Chinese Cap: Medium-Dark Skin Tone 👲🏿 Man With Chinese Cap: Dark Skin Tone 🧕 Woman With Headscarf 🧕🏻 Woman With Headscarf: Light Skin Tone 🧕🏼 Woman With Headscarf: Medium-Light Skin Tone 🧕🏽 Woman With Headscarf: Medium Skin Tone 🧕🏾 Woman With Headscarf: Medium-Dark Skin Tone 🧕🏿 Woman With Headscarf: Dark Skin Tone 🧔 Bearded Person 🧔🏻 Bearded Person: Light Skin Tone 🧔🏼 Bearded Person: Medium-Light Skin Tone 🧔🏽 Bearded Person: Medium Skin Tone 🧔🏾 Bearded Person: Medium-Dark Skin Tone 🧔🏿 Bearded Person: Dark Skin Tone 👱 Blond-Haired Person 👱🏻 Blond-Haired Person: Light Skin Tone 👱🏼 Blond-Haired Person: Medium-Light Skin Tone 👱🏽 Blond-Haired Person: Medium Skin Tone 👱🏾 Blond-Haired Person: Medium-Dark Skin Tone 👱🏿 Blond-Haired Person: Dark Skin Tone 👱‍♂️ Blond-Haired
Man 👱🏻‍♂️ Blond-Haired Man: Light Skin Tone 👱🏼‍♂️ Blond-Haired Man: Medium-Light Skin Tone 👱🏽‍♂️ Blond-Haired Man: Medium Skin Tone 👱🏾‍♂️ Blond-Haired Man: Medium-Dark Skin Tone 👱🏿‍♂️ Blond-Haired Man: Dark Skin Tone 👱‍♀️ Blond-Haired Woman 👱🏻‍♀️ Blond-Haired Woman: Light Skin Tone 👱🏼‍♀️ Blond-Haired Woman: Medium-Light Skin Tone 👱🏽‍♀️ Blond-Haired Woman: Medium Skin Tone 👱🏾‍♀️ Blond-Haired Woman: Medium-Dark Skin Tone 👱🏿‍♀️ Blond-Haired Woman: Dark Skin Tone 🤵 Man in Tuxedo 🤵🏻 Man in Tuxedo: Light Skin Tone 🤵🏼 Man in Tuxedo: Medium-Light Skin Tone 🤵🏽 Man in Tuxedo: Medium Skin Tone 🤵🏾 Man in Tuxedo: Medium-Dark Skin Tone 🤵🏿 Man in Tuxedo: Dark Skin Tone 👰 Bride With Veil 👰🏻 Bride With Veil: Light Skin Tone 👰🏼 Bride With Veil: Medium-Light Skin Tone 👰🏽 Bride With Veil: Medium Skin Tone 👰🏾 Bride With Veil: Medium-Dark Skin Tone 👰🏿 Bride With Veil: Dark Skin Tone 🤰 Pregnant Woman 🤰🏻 Pregnant Woman: Light Skin Tone 🤰🏼 Pregnant Woman: Medium-Light Skin Tone 🤰🏽 Pregnant Woman: Medium Skin Tone 🤰🏾 Pregnant Woman: Medium-Dark Skin Tone 🤰🏿 Pregnant Woman: Dark Skin Tone 🤱 Breast-Feeding 🤱🏻 Breast-Feeding: Light Skin Tone 🤱🏼 Breast-Feeding: Medium-Light Skin Tone 🤱🏽 Breast-Feeding: Medium Skin Tone 🤱🏾 Breast-Feeding: Medium-Dark Skin Tone 🤱🏿 Breast-Feeding: Dark Skin Tone 👼 Baby Angel 👼🏻 Baby Angel: Light Skin Tone 👼🏼 Baby Angel: Medium-Light Skin Tone 👼🏽 Baby Angel: Medium Skin Tone 👼🏾 Baby Angel: Medium-Dark Skin Tone 👼🏿 Baby Angel: Dark Skin Tone 🎅 Santa Claus 🎅🏻 Santa Claus: Light Skin Tone 🎅🏼 Santa Claus: Medium-Light Skin Tone 🎅🏽 Santa Claus: Medium Skin Tone 🎅🏾 Santa Claus: Medium-Dark Skin Tone 🎅🏿 Santa Claus: Dark Skin Tone 🤶 Mrs. Claus 🤶🏻 Mrs. Claus: Light Skin Tone 🤶🏼 Mrs. Claus: Medium-Light Skin Tone 🤶🏽 Mrs. Claus: Medium Skin Tone 🤶🏾 Mrs. Claus: Medium-Dark Skin Tone 🤶🏿 Mrs. 
Claus: Dark Skin Tone 🧙 Mage 🧙🏻 Mage: Light Skin Tone 🧙🏼 Mage: Medium-Light Skin Tone 🧙🏽 Mage: Medium Skin Tone 🧙🏾 Mage: Medium-Dark Skin Tone 🧙🏿 Mage: Dark Skin Tone 🧙‍♀️ Woman Mage 🧙🏻‍♀️ Woman Mage: Light Skin Tone 🧙🏼‍♀️ Woman Mage: Medium-Light Skin Tone 🧙🏽‍♀️ Woman Mage: Medium Skin Tone 🧙🏾‍♀️ Woman Mage: Medium-Dark Skin Tone 🧙🏿‍♀️ Woman Mage: Dark Skin Tone 🧙‍♂️ Man Mage 🧙🏻‍♂️ Man Mage: Light Skin Tone 🧙🏼‍♂️ Man Mage: Medium-Light Skin Tone 🧙🏽‍♂️ Man Mage: Medium Skin Tone 🧙🏾‍♂️ Man Mage: Medium-Dark Skin Tone 🧙🏿‍♂️ Man Mage: Dark Skin Tone 🧚 Fairy 🧚🏻 Fairy: Light Skin Tone 🧚🏼 Fairy: Medium-Light Skin Tone 🧚🏽 Fairy: Medium Skin Tone 🧚🏾 Fairy: Medium-Dark Skin Tone 🧚🏿 Fairy: Dark Skin Tone 🧚‍♀️ Woman Fairy 🧚🏻‍♀️ Woman Fairy: Light Skin Tone 🧚🏼‍♀️ Woman Fairy: Medium-Light Skin Tone 🧚🏽‍♀️ Woman Fairy: Medium Skin Tone 🧚🏾‍♀️ Woman Fairy: Medium-Dark Skin Tone 🧚🏿‍♀️ Woman Fairy: Dark Skin Tone 🧚‍♂️ Man Fairy 🧚🏻‍♂️ Man Fairy: Light Skin Tone 🧚🏼‍♂️ Man Fairy: Medium-Light Skin Tone 🧚🏽‍♂️ Man Fairy: Medium Skin Tone 🧚🏾‍♂️ Man Fairy: Medium-Dark Skin Tone 🧚🏿‍♂️ Man Fairy: Dark Skin Tone 🧛 Vampire 🧛🏻 Vampire: Light Skin Tone 🧛🏼 Vampire: Medium-Light Skin Tone 🧛🏽 Vampire: Medium Skin Tone 🧛🏾 Vampire: Medium-Dark Skin Tone 🧛🏿 Vampire: Dark Skin Tone 🧛‍♀️ Woman Vampire 🧛🏻‍♀️ Woman Vampire: Light Skin Tone 🧛🏼‍♀️ Woman Vampire: Medium-Light Skin Tone 🧛🏽‍♀️ Woman Vampire: Medium Skin Tone 🧛🏾‍♀️ Woman Vampire: Medium-Dark Skin Tone 🧛🏿‍♀️ Woman Vampire: Dark Skin Tone 🧛‍♂️ Man Vampire 🧛🏻‍♂️ Man Vampire: Light Skin Tone 🧛🏼‍♂️ Man Vampire: Medium-Light Skin Tone 🧛🏽‍♂️ Man Vampire: Medium Skin Tone 🧛🏾‍♂️ Man Vampire: Medium-Dark Skin Tone 👯🏻 Woman With Bunny Ears, Type-1-2 👯🏼 Woman With Bunny Ears, Type-3 🧛🏿‍♂️ Man Vampire: Dark Skin Tone 👯🏽 Woman With Bunny Ears, Type-4 👯🏾 Woman With Bunny Ears, Type-5 🧜 Merperson 👯🏿 Woman With Bunny Ears, Type-6 🧜🏻 Merperson: Light Skin Tone 👯🏻‍♂️ Men With Bunny Ears Partying, Type-1-2 🧜🏼 Merperson: Medium-Light Skin Tone 👯🏼‍♂️ Men With Bunny 
Ears Partying, Type-3 🧜🏽 Merperson: Medium Skin Tone 👯🏽‍♂️ Men With Bunny Ears Partying, Type-4 🧜🏾 Merperson: Medium-Dark Skin Tone 👯🏾‍♂️ Men With Bunny Ears Partying, Type-5 🧜🏿 Merperson: Dark Skin Tone 👯🏿‍♂️ Men With Bunny Ears Partying, Type-6 🧜‍♀️ Mermaid 👯🏻‍♀️ Women With Bunny Ears Partying, Type-1-2 🧜🏻‍♀️ Mermaid: Light Skin Tone 👯🏼‍♀️ Women With Bunny Ears Partying, Type-3 🧜🏼‍♀️ Mermaid: Medium-Light Skin Tone 👯🏽‍♀️ Women With Bunny Ears Partying, Type-4 👯🏾‍♀️ Women With Bunny Ears Partying, Type-5 🧜🏽‍♀️ Mermaid: Medium Skin Tone 👯🏿‍♀️ Women With Bunny Ears Partying, Type-6 🧜🏾‍♀️ Mermaid: Medium-Dark Skin Tone 🧜🏿‍♀️ Mermaid: Dark Skin Tone 🧜‍♂️ Merman 🧜🏻‍♂️ Merman: Light Skin Tone 🧜🏼‍♂️ Merman: Medium-Light Skin Tone 👫🏻 Man and Woman Holding Hands, Type-1-2 🧜🏽‍♂️ Merman: Medium Skin Tone 👫🏼 Man and Woman Holding Hands, Type-3 👫🏽 Man and Woman Holding Hands, Type-4 🧜🏾‍♂️ Merman: Medium-Dark Skin Tone 👫🏾 Man and Woman Holding Hands, Type-5 👫🏿 Man and Woman Holding Hands, Type-6 🧜🏿‍♂️ Merman: Dark Skin Tone 👬🏻 Two Men Holding Hands, Type-1-2 🧝 Elf 👬🏼 Two Men Holding Hands, Type-3 👬🏽 Two Men Holding Hands, Type-4 🧝🏻 Elf: Light Skin Tone 👬🏾 Two Men Holding Hands, Type-5 🧝🏼 Elf: Medium-Light Skin Tone 👬🏿 Two Men Holding Hands, Type-6 🧝🏽 Elf: Medium Skin Tone 🧝🏾 Elf: Medium-Dark Skin Tone 👭🏻 Two Women Holding Hands, Type-1-2 🧝🏿 Elf: Dark Skin Tone 🧝‍♀️ Woman Elf 👭🏼 Two Women Holding Hands, Type-3 👭🏽 Two Women Holding Hands, Type-4 🧝🏻‍♀️ Woman Elf: Light Skin Tone 👭🏾 Two Women Holding Hands, Type-5 👭🏿 Two Women Holding Hands, Type-6 🧝🏼‍♀️ Woman Elf: Medium-Light Skin Tone 🧝🏽‍♀️ Woman Elf: Medium Skin Tone 🧝🏾‍♀️ Woman Elf: Medium-Dark Skin Tone 🧝🏿‍♀️ Woman Elf: Dark Skin Tone 🧝‍♂️ Man Elf 👪🏻 Family, Type-1-2 🧝🏻‍♂️ Man Elf: Light Skin Tone 👪🏼 Family, Type-3 👪🏽 Family, Type-4 🧝🏼‍♂️ Man Elf: Medium-Light Skin Tone 👪🏾 Family, Type-5 👪🏿 Family, Type-6 🧝🏽‍♂️ Man Elf: Medium Skin Tone 🧝🏾‍♂️ Man Elf: Medium-Dark Skin Tone 🧝🏿‍♂️ Man Elf: Dark Skin Tone 🧞 Genie 🧞‍♀️ Woman 
Genie 🧞‍♂️ Man Genie 🧟 Zombie 🧟‍♀️ Woman Zombie 🧟‍♂️ Man Zombie 🙍 Person Frowning 🙍🏻 Person Frowning: Light Skin Tone 🙍🏼 Person Frowning: Medium-Light Skin Tone 🙍🏽 Person Frowning: Medium Skin Tone 🙍🏾 Person Frowning: Medium-Dark Skin Tone 🙍🏿 Person Frowning: Dark Skin Tone 🙍‍♂️ Man Frowning 🙍🏻‍♂️ Man Frowning: Light Skin Tone 🏻 Light Skin Tone 🏼 Medium-Light Skin Tone 🙍🏼‍♂️ Man Frowning: Medium-Light Skin Tone 🏽 Medium Skin Tone 🙍🏽‍♂️ Man Frowning: Medium Skin Tone 🏾 Medium-Dark Skin Tone 🏿 Dark Skin Tone 🙍🏾‍♂️ Man Frowning: Medium-Dark Skin Tone 🙍🏿‍♂️ Man Frowning: Dark Skin Tone 🙍‍♀️ Woman Frowning 🙍🏻‍♀️ Woman Frowning: Light Skin Tone 🙍🏼‍♀️ Woman Frowning: Medium-Light Skin Tone 🙍🏽‍♀️ Woman Frowning: Medium Skin Tone 🙍🏾‍♀️ Woman Frowning: Medium-Dark Skin Tone 🙍🏿‍♀️ Woman Frowning: Dark Skin Tone 🙎 Person Pouting 🙎🏻 Person Pouting: Light Skin Tone 🙎🏼 Person Pouting: Medium-Light Skin Tone 🙎🏽 Person Pouting: Medium Skin Tone 🙎🏾 Person Pouting: Medium-Dark Skin Tone 🙎🏿 Person Pouting: Dark Skin Tone 🙎‍♂️ Man Pouting 🙎🏻‍♂️ Man Pouting: Light Skin Tone 🙎🏼‍♂️ Man Pouting: Medium-Light Skin Tone 🙎🏽‍♂️ Man Pouting: Medium Skin Tone 🙎🏾‍♂️ Man Pouting: Medium-Dark Skin Tone 🙎🏿‍♂️ Man Pouting: Dark Skin Tone 🙎‍♀️ Woman Pouting 🙎🏻‍♀️ Woman Pouting: Light Skin Tone 🙎🏼‍♀️ Woman Pouting: Medium-Light Skin Tone 🙎🏽‍♀️ Woman Pouting: Medium Skin Tone 🙎🏾‍♀️ Woman Pouting: Medium-Dark Skin Tone 🙎🏿‍♀️ Woman Pouting: Dark Skin Tone 🙅 Person Gesturing No 🙅🏻 Person Gesturing No: Light Skin Tone 🙅🏼 Person Gesturing No: Medium-Light Skin Tone 🙅🏽 Person Gesturing No: Medium Skin Tone 🙅🏾 Person Gesturing No: Medium-Dark Skin Tone 🙅🏿 Person Gesturing No: Dark Skin Tone 🙅‍♂️ Man Gesturing No 🙅🏻‍♂️ Man Gesturing No: Light Skin Tone 🙅🏼‍♂️ Man Gesturing No: Medium-Light Skin Tone 🙅🏽‍♂️ Man Gesturing No: Medium Skin Tone 🙅🏾‍♂️ Man Gesturing No: Medium-Dark Skin Tone 🙅🏿‍♂️ Man Gesturing No: Dark Skin Tone 🙅‍♀️ Woman Gesturing No 🙅🏻‍♀️ Woman Gesturing No: Light Skin Tone 🙅🏼‍♀️ Woman Gesturing 
No: Medium-Light Skin Tone 🙅🏽‍♀️ Woman Gesturing No: Medium Skin Tone 🙅🏾‍♀️ Woman Gesturing No: Medium-Dark Skin Tone 🙅🏿‍♀️ Woman Gesturing No: Dark Skin Tone 🙆 Person Gesturing OK 🙆🏻 Person Gesturing OK: Light Skin Tone 🙆🏼 Person Gesturing OK: Medium-Light Skin Tone 🙆🏽 Person Gesturing OK: Medium Skin Tone 🙆🏾 Person Gesturing OK: Medium-Dark Skin Tone 🙆🏿 Person Gesturing OK: Dark Skin Tone 🙆‍♂️ Man Gesturing OK 🙆🏻‍♂️ Man Gesturing OK: Light Skin Tone 🙆🏼‍♂️ Man Gesturing OK: Medium-Light Skin Tone 🙆🏽‍♂️ Man Gesturing OK: Medium Skin Tone 🙆🏾‍♂️ Man Gesturing OK: Medium-Dark Skin Tone 🙆🏿‍♂️ Man Gesturing OK: Dark Skin Tone 🙆‍♀️ Woman Gesturing OK 🙆🏻‍♀️ Woman Gesturing OK: Light Skin Tone 🙆🏼‍♀️ Woman Gesturing OK: Medium-Light Skin Tone 🙆🏽‍♀️ Woman Gesturing OK: Medium Skin Tone 🙆🏾‍♀️ Woman Gesturing OK: Medium-Dark Skin Tone 🙆🏿‍♀️ Woman Gesturing OK: Dark Skin Tone 💁 Person Tipping Hand 💁🏻 Person Tipping Hand: Light Skin Tone 💁🏼 Person Tipping Hand: Medium-Light Skin Tone 💁🏽 Person Tipping Hand: Medium Skin Tone 💁🏾 Person Tipping Hand: Medium-Dark Skin Tone 💁🏿 Person Tipping Hand: Dark Skin Tone 💁‍♂️ Man Tipping Hand 💁🏻‍♂️ Man Tipping Hand: Light Skin Tone 💁🏼‍♂️ Man Tipping Hand: Medium-Light Skin Tone 💁🏽‍♂️ Man Tipping Hand: Medium Skin Tone 💁🏾‍♂️ Man Tipping Hand: Medium-Dark Skin Tone 💁🏿‍♂️ Man Tipping Hand: Dark Skin Tone 💁‍♀️ Woman Tipping Hand 💁🏻‍♀️ Woman Tipping Hand: Light Skin Tone 💁🏼‍♀️ Woman Tipping Hand: Medium-Light Skin Tone 💁🏽‍♀️ Woman Tipping Hand: Medium Skin Tone 💁🏾‍♀️ Woman Tipping Hand: Medium-Dark Skin Tone 💁🏿‍♀️ Woman Tipping Hand: Dark Skin Tone 🙋 Person Raising Hand 🙋🏻 Person Raising Hand: Light Skin Tone 🙋🏼 Person Raising Hand: Medium-Light Skin Tone 🙋🏽 Person Raising Hand: Medium Skin Tone 🙋🏾 Person Raising Hand: Medium-Dark Skin Tone 🙋🏿 Person Raising Hand: Dark Skin Tone 🙋‍♂️ Man Raising Hand 🙋🏻‍♂️ Man Raising Hand: Light Skin Tone 🙋🏼‍♂️ Man Raising Hand: Medium-Light Skin Tone 🙋🏽‍♂️ Man Raising Hand: Medium Skin Tone 🙋🏾‍♂️ Man Raising 
Hand: Medium-Dark Skin Tone 🙋🏿‍♂️ Man Raising Hand: Dark Skin Tone 🙋‍♀️ Woman Raising Hand 🙋🏻‍♀️ Woman Raising Hand: Light Skin Tone 🙋🏼‍♀️ Woman Raising Hand: Medium-Light Skin Tone 🙋🏽‍♀️ Woman Raising Hand: Medium Skin Tone 🙋🏾‍♀️ Woman Raising Hand: Medium-Dark Skin Tone 🙋🏿‍♀️ Woman Raising Hand: Dark Skin Tone 🙇 Person Bowing 🙇🏻 Person Bowing: Light Skin Tone 🙇🏼 Person Bowing: Medium-Light Skin Tone 🙇🏽 Person Bowing: Medium Skin Tone 🙇🏾 Person Bowing: Medium-Dark Skin Tone 🙇🏿 Person Bowing: Dark Skin Tone 🙇‍♂️ Man Bowing 🙇🏻‍♂️ Man Bowing: Light Skin Tone 🤝🏻 Handshake, Type-1-2 🙇🏼‍♂️ Man Bowing: Medium-Light Skin Tone 🤝🏼 Handshake, Type-3 🤝🏽 Handshake, Type-4 🙇🏽‍♂️ Man Bowing: Medium Skin Tone 🤝🏾 Handshake, Type-5 🤝🏿 Handshake, Type-6 🙇🏾‍♂️ Man Bowing: Medium-Dark Skin Tone 🙇🏿‍♂️ Man Bowing: Dark Skin Tone 🙇‍♀️ Woman Bowing 🙇🏻‍♀️ Woman Bowing: Light Skin Tone 🙇🏼‍♀️ Woman Bowing: Medium-Light Skin Tone 🙇🏽‍♀️ Woman Bowing: Medium Skin Tone 🙇🏾‍♀️ Woman Bowing: Medium-Dark Skin Tone 🙇🏿‍♀️ Woman Bowing: Dark Skin Tone 🤦 Person Facepalming 🤦🏻 Person Facepalming: Light Skin Tone 🤦🏼 Person Facepalming: Medium-Light Skin Tone 🤦🏽 Person Facepalming: Medium Skin Tone 🤦🏾 Person Facepalming: Medium-Dark Skin Tone 🤦🏿 Person Facepalming: Dark Skin Tone 🤦‍♂️ Man Facepalming 🤦🏻‍♂️ Man Facepalming: Light Skin Tone 🤦🏼‍♂️ Man Facepalming: Medium-Light Skin Tone 🤦🏽‍♂️ Man Facepalming: Medium Skin Tone 🤦🏾‍♂️ Man Facepalming: Medium-Dark Skin Tone 🤦🏿‍♂️ Man Facepalming: Dark Skin Tone 🤦‍♀️ Woman Facepalming 🤦🏻‍♀️ Woman Facepalming: Light Skin Tone 🤦🏼‍♀️ Woman Facepalming: Medium-Light Skin Tone 🤦🏽‍♀️ Woman Facepalming: Medium Skin Tone 🤦🏾‍♀️ Woman Facepalming: Medium-Dark Skin Tone 🤦🏿‍♀️ Woman Facepalming: Dark Skin Tone 🤷 Person Shrugging 🤷🏻 Person Shrugging: Light Skin Tone 🤷🏼 Person Shrugging: Medium-Light Skin Tone 🤷🏽 Person Shrugging: Medium Skin Tone 🤷🏾 Person Shrugging: Medium-Dark Skin Tone 🤷🏿 Person Shrugging: Dark Skin Tone 🤷‍♂️ Man Shrugging 🤷🏻‍♂️ Man Shrugging: Light Skin 
Tone 🤷🏼‍♂️ Man Shrugging: Medium-Light Skin Tone 🤷🏽‍♂️ Man Shrugging: Medium Skin Tone 🤷🏾‍♂️ Man Shrugging: Medium-Dark Skin Tone 🤷🏿‍♂️ Man Shrugging: Dark Skin Tone 🤷‍♀️ Woman Shrugging 🤷🏻‍♀️ Woman Shrugging: Light Skin Tone 🤷🏼‍♀️ Woman Shrugging: Medium-Light Skin Tone 🤷🏽‍♀️ Woman Shrugging: Medium Skin Tone 🤷🏾‍♀️ Woman Shrugging: Medium-Dark Skin Tone 🤷🏿‍♀️ Woman Shrugging: Dark Skin Tone 💆 Person Getting Massage 💆🏻 Person Getting Massage: Light Skin Tone 💆🏼 Person Getting Massage: Medium-Light Skin Tone 💆🏽 Person Getting Massage: Medium Skin Tone 💆🏾 Person Getting Massage: Medium-Dark Skin Tone 💆🏿 Person Getting Massage: Dark Skin Tone 💆‍♂️ Man Getting Massage 💆🏻‍♂️ Man Getting Massage: Light Skin Tone 💆🏼‍♂️ Man Getting Massage: Medium-Light Skin Tone 💆🏽‍♂️ Man Getting Massage: Medium Skin Tone 💆🏾‍♂️ Man Getting Massage: Medium-Dark Skin Tone 💆🏿‍♂️ Man Getting Massage: Dark Skin Tone 💆‍♀️ Woman Getting Massage 💆🏻‍♀️ Woman Getting Massage: Light Skin Tone 💆🏼‍♀️ Woman Getting Massage: Medium-Light Skin Tone 💆🏽‍♀️ Woman Getting Massage: Medium Skin Tone 💆🏾‍♀️ Woman Getting Massage: Medium-Dark Skin Tone 💆🏿‍♀️ Woman Getting Massage: Dark Skin Tone 💇 Person Getting Haircut 💇🏻 Person Getting Haircut: Light Skin Tone 💇🏼 Person Getting Haircut: Medium-Light Skin Tone 💇🏽 Person Getting Haircut: Medium Skin Tone 💇🏾 Person Getting Haircut: Medium-Dark Skin Tone 💇🏿 Person Getting Haircut: Dark Skin Tone 💇‍♂️ Man Getting Haircut 💇🏻‍♂️ Man Getting Haircut: Light Skin Tone 💇🏼‍♂️ Man Getting Haircut: Medium-Light Skin Tone 💇🏽‍♂️ Man Getting Haircut: Medium Skin Tone 💇🏾‍♂️ Man Getting Haircut: Medium-Dark Skin Tone 💇🏿‍♂️ Man Getting Haircut: Dark Skin Tone 💇‍♀️ Woman Getting Haircut 💇🏻‍♀️ Woman Getting Haircut: Light Skin Tone 💇🏼‍♀️ Woman Getting Haircut: Medium-Light Skin Tone 💇🏽‍♀️ Woman Getting Haircut: Medium Skin Tone 💇🏾‍♀️ Woman Getting Haircut: Medium-Dark Skin Tone 💇🏿‍♀️ Woman Getting Haircut: Dark Skin Tone 🚶 Person Walking 🚶🏻 Person Walking: Light Skin Tone 🚶🏼 Person 
Walking: Medium-Light Skin Tone 🚶🏽 Person Walking: Medium Skin Tone 🚶🏾 Person Walking: Medium-Dark Skin Tone 🚶🏿 Person Walking: Dark Skin Tone 🚶‍♂️ Man Walking 🚶🏻‍♂️ Man Walking: Light Skin Tone 🚶🏼‍♂️ Man Walking: Medium-Light Skin Tone 🚶🏽‍♂️ Man Walking: Medium Skin Tone 🚶🏾‍♂️ Man Walking: Medium-Dark Skin Tone 🚶🏿‍♂️ Man Walking: Dark Skin Tone 🚶‍♀️ Woman Walking 🚶🏻‍♀️ Woman Walking: Light Skin Tone 🚶🏼‍♀️ Woman Walking: Medium-Light Skin Tone 🚶🏽‍♀️ Woman Walking: Medium Skin Tone 🚶🏾‍♀️ Woman Walking: Medium-Dark Skin Tone 🚶🏿‍♀️ Woman Walking: Dark Skin Tone 🏃 Person Running 🏃🏻 Person Running: Light Skin Tone 🏃🏼 Person Running: Medium-Light Skin Tone 🏃🏽 Person Running: Medium Skin Tone 🏃🏾 Person Running: Medium-Dark Skin Tone 🏃🏿 Person Running: Dark Skin Tone 🏃‍♂️ Man Running 🏃🏻‍♂️ Man Running: Light Skin Tone 🏃🏼‍♂️ Man Running: Medium-Light Skin Tone 🏃🏽‍♂️ Man Running: Medium Skin Tone 🏃🏾‍♂️ Man Running: Medium-Dark Skin Tone 🏃🏿‍♂️ Man Running: Dark Skin Tone 🏃‍♀️ Woman Running 🏃🏻‍♀️ Woman Running: Light Skin Tone 🏃🏼‍♀️ Woman Running: Medium-Light Skin Tone 🏃🏽‍♀️ Woman Running: Medium Skin Tone 🏃🏾‍♀️ Woman Running: Medium-Dark Skin Tone 🏃🏿‍♀️ Woman Running: Dark Skin Tone 💃 Woman Dancing 💃🏻 Woman Dancing: Light Skin Tone 💃🏼 Woman Dancing: Medium-Light Skin Tone 💃🏽 Woman Dancing: Medium Skin Tone 💃🏾 Woman Dancing: Medium-Dark Skin Tone 💃🏿 Woman Dancing: Dark Skin Tone 🕺 Man Dancing 🕺🏻 Man Dancing: Light Skin Tone 🕺🏼 Man Dancing: Medium-Light Skin Tone 🕺🏽 Man Dancing: Medium Skin Tone 🕺🏾 Man Dancing: Medium-Dark Skin Tone 🕺🏿 Man Dancing: Dark Skin Tone 👯 People With Bunny Ears Partying 👯‍♂️ Men With Bunny Ears Partying 👯‍♀️ Women With Bunny Ears Partying 🧖 Person in Steamy Room 🧖🏻 Person in Steamy Room: Light Skin Tone 🧖🏼 Person in Steamy Room: Medium-Light Skin Tone 🧖🏽 Person in Steamy Room: Medium Skin Tone 🧖🏾 Person in Steamy Room: Medium-Dark Skin Tone 🧖🏿 Person in Steamy Room: Dark Skin Tone 🧖‍♀️ Woman in Steamy Room 🧖🏻‍♀️ Woman in Steamy Room: Light Skin Tone 
🧖🏼‍♀️ Woman in Steamy Room: Medium-Light Skin Tone 🧖🏽‍♀️ Woman in Steamy Room: Medium Skin Tone 🧖🏾‍♀️ Woman in Steamy Room: Medium-Dark Skin Tone 🧖🏿‍♀️ Woman in Steamy Room: Dark Skin Tone 🧖‍♂️ Man in Steamy Room 🧖🏻‍♂️ Man in Steamy Room: Light Skin Tone 🧖🏼‍♂️ Man in Steamy Room: Medium-Light Skin Tone 🧖🏽‍♂️ Man in Steamy Room: Medium Skin Tone 🧖🏾‍♂️ Man in Steamy Room: Medium-Dark Skin Tone 🧖🏿‍♂️ Man in Steamy Room: Dark Skin Tone 🧗 Person Climbing 🧗🏻 Person Climbing: Light Skin Tone 🧗🏼 Person Climbing: Medium-Light Skin Tone 🧗🏽 Person Climbing: Medium Skin Tone 🧗🏾 Person Climbing: Medium-Dark Skin Tone 🧗🏿 Person Climbing: Dark Skin Tone 🧗‍♀️ Woman Climbing 🧗🏻‍♀️ Woman Climbing: Light Skin Tone 🧗🏼‍♀️ Woman Climbing: Medium-Light Skin Tone 🧗🏽‍♀️ Woman Climbing: Medium Skin Tone 🧗🏾‍♀️ Woman Climbing: Medium-Dark Skin Tone 🧗🏿‍♀️ Woman Climbing: Dark Skin Tone 🧗‍♂️ Man Climbing 🧗🏻‍♂️ Man Climbing: Light Skin Tone 🧗🏼‍♂️ Man Climbing: Medium-Light Skin Tone 🧗🏽‍♂️ Man Climbing: Medium Skin Tone 🧗🏾‍♂️ Man Climbing: Medium-Dark Skin Tone 🧗🏿‍♂️ Man Climbing: Dark Skin Tone 🧘 Person in Lotus Position 🧘🏻 Person in Lotus Position: Light Skin Tone 🧘🏼 Person in Lotus Position: Medium-Light Skin Tone 🧘🏽 Person in Lotus Position: Medium Skin Tone 🧘🏾 Person in Lotus Position: Medium-Dark Skin Tone 🧘🏿 Person in Lotus Position: Dark Skin Tone 🧘‍♀️ Woman in Lotus Position 🧘🏻‍♀️ Woman in Lotus Position: Light Skin Tone 🧘🏼‍♀️ Woman in Lotus Position: Medium-Light Skin Tone 🧘🏽‍♀️ Woman in Lotus Position: Medium Skin Tone 🧘🏾‍♀️ Woman in Lotus Position: Medium-Dark Skin Tone 🧘🏿‍♀️ Woman in Lotus Position: Dark Skin Tone 🧘‍♂️ Man in Lotus Position 🧘🏻‍♂️ Man in Lotus Position: Light Skin Tone 🧘🏼‍♂️ Man in Lotus Position: Medium-Light Skin Tone 🧘🏽‍♂️ Man in Lotus Position: Medium Skin Tone 🧘🏾‍♂️ Man in Lotus Position: Medium-Dark Skin Tone 🧘🏿‍♂️ Man in Lotus Position: Dark Skin Tone 🛀 Person Taking Bath 🛀🏻 Person Taking Bath: Light Skin Tone 🛀🏼 Person Taking Bath: Medium-Light Skin Tone 🛀🏽 
Person Taking Bath: Medium Skin Tone 🛀🏾 Person Taking Bath: Medium-Dark Skin Tone 🛀🏿 Person Taking Bath: Dark Skin Tone 🛌 Person in Bed 🛌🏻 Person in Bed: Light Skin Tone 🛌🏼 Person in Bed: Medium-Light Skin Tone 🛌🏽 Person in Bed: Medium Skin Tone 🛌🏾 Person in Bed: Medium-Dark Skin Tone 🛌🏿 Person in Bed: Dark Skin Tone 🕴 Man in Business Suit Levitating 🕴🏻 Man in Business Suit Levitating: Light Skin Tone 🕴🏼 Man in Business Suit Levitating: Medium-Light Skin Tone 🕴🏽 Man in Business Suit Levitating: Medium Skin Tone 🕴🏾 Man in Business Suit Levitating: Medium-Dark Skin Tone 🕴🏿 Man in Business Suit Levitating: Dark Skin Tone 🗣 Speaking Head 👤 Bust in Silhouette 👥 Busts in Silhouette 🤺 Person Fencing 🏇 Horse Racing 🏇🏻 Horse Racing: Light Skin Tone 🏇🏼 Horse Racing: Medium-Light Skin Tone 🏇🏽 Horse Racing: Medium Skin Tone 🏇🏾 Horse Racing: Medium-Dark Skin Tone 🏇🏿 Horse Racing: Dark Skin Tone ⛷ Skier 🏂 Snowboarder 🏂🏻 Snowboarder: Light Skin Tone 🏂🏼 Snowboarder: Medium-Light Skin Tone 🏂🏽 Snowboarder: Medium Skin Tone 🏂🏾 Snowboarder: Medium-Dark Skin Tone 🏂🏿 Snowboarder: Dark Skin Tone 🏌 Person Golfing 🏌🏻 Person Golfing: Light Skin Tone 🏌🏼 Person Golfing: Medium-Light Skin Tone 🏌🏽 Person Golfing: Medium Skin Tone 🏌🏾 Person Golfing: Medium-Dark Skin Tone 🏌🏿 Person Golfing: Dark Skin Tone 🏌️‍♂️ Man Golfing 🏌🏻‍♂️ Man Golfing: Light Skin Tone 🏌🏼‍♂️ Man Golfing: Medium-Light Skin Tone 🏌🏽‍♂️ Man Golfing: Medium Skin Tone 🏌🏾‍♂️ Man Golfing: Medium-Dark Skin Tone 🏌🏿‍♂️ Man Golfing: Dark Skin Tone 🏌️‍♀️ Woman Golfing 🏌🏻‍♀️ Woman Golfing: Light Skin Tone 🏌🏼‍♀️ Woman Golfing: Medium-Light Skin Tone 🏌🏽‍♀️ Woman Golfing: Medium Skin Tone 🏌🏾‍♀️ Woman Golfing: Medium-Dark Skin Tone 🏌🏿‍♀️ Woman Golfing: Dark Skin Tone 🏄 Person Surfing 🏄🏻 Person Surfing: Light Skin Tone 🏄🏼 Person Surfing: Medium-Light Skin Tone 🏄🏽 Person Surfing: Medium Skin Tone 🏄🏾 Person Surfing: Medium-Dark Skin Tone 🏄🏿 Person Surfing: Dark Skin Tone 🏄‍♂️ Man Surfing 🏄🏻‍♂️ Man Surfing: Light Skin Tone 🏄🏼‍♂️ Man Surfing: 
Medium-Light Skin Tone 🏄🏽‍♂️ Man Surfing: Medium Skin Tone 🏄🏾‍♂️ Man Surfing: Medium-Dark Skin Tone 🏄🏿‍♂️ Man Surfing: Dark Skin Tone 🏄‍♀️ Woman Surfing 🏄🏻‍♀️ Woman Surfing: Light Skin Tone 🏄🏼‍♀️ Woman Surfing: Medium-Light Skin Tone 🏄🏽‍♀️ Woman Surfing: Medium Skin Tone 🏄🏾‍♀️ Woman Surfing: Medium-Dark Skin Tone 🏄🏿‍♀️ Woman Surfing: Dark Skin Tone 🚣 Person Rowing Boat 🚣🏻 Person Rowing Boat: Light Skin Tone 🚣🏼 Person Rowing Boat: Medium-Light Skin Tone 🚣🏽 Person Rowing Boat: Medium Skin Tone 🚣🏾 Person Rowing Boat: Medium-Dark Skin Tone 🚣🏿 Person Rowing Boat: Dark Skin Tone 🚣‍♂️ Man Rowing Boat 🚣🏻‍♂️ Man Rowing Boat: Light Skin Tone 🚣🏼‍♂️ Man Rowing Boat: Medium-Light Skin Tone 🚣🏽‍♂️ Man Rowing Boat: Medium Skin Tone 🚣🏾‍♂️ Man Rowing Boat: Medium-Dark Skin Tone 🚣🏿‍♂️ Man Rowing Boat: Dark Skin Tone 🚣‍♀️ Woman Rowing Boat 🚣🏻‍♀️ Woman Rowing Boat: Light Skin Tone 🚣🏼‍♀️ Woman Rowing Boat: Medium-Light Skin Tone 🚣🏽‍♀️ Woman Rowing Boat: Medium Skin Tone 🚣🏾‍♀️ Woman Rowing Boat: Medium-Dark Skin Tone 🚣🏿‍♀️ Woman Rowing Boat: Dark Skin Tone 🏊 Person Swimming 🏊🏻 Person Swimming: Light Skin Tone 🏊🏼 Person Swimming: Medium-Light Skin Tone 🏊🏽 Person Swimming: Medium Skin Tone 🏊🏾 Person Swimming: Medium-Dark Skin Tone 🏊🏿 Person Swimming: Dark Skin Tone 🏊‍♂️ Man Swimming 🏊🏻‍♂️ Man Swimming: Light Skin Tone 🏊🏼‍♂️ Man Swimming: Medium-Light Skin Tone 🏊🏽‍♂️ Man Swimming: Medium Skin Tone 🏊🏾‍♂️ Man Swimming: Medium-Dark Skin Tone 🏊🏿‍♂️ Man Swimming: Dark Skin Tone 🏊‍♀️ Woman Swimming 🏊🏻‍♀️ Woman Swimming: Light Skin Tone 🏊🏼‍♀️ Woman Swimming: Medium-Light Skin Tone 🏊🏽‍♀️ Woman Swimming: Medium Skin Tone 🏊🏾‍♀️ Woman Swimming: Medium-Dark Skin Tone 🏊🏿‍♀️ Woman Swimming: Dark Skin Tone ⛹ Person Bouncing Ball ⛹🏻 Person Bouncing Ball: Light Skin Tone ⛹🏼 Person Bouncing Ball: Medium-Light Skin Tone ⛹🏽 Person Bouncing Ball: Medium Skin Tone ⛹🏾 Person Bouncing Ball: Medium-Dark Skin Tone ⛹🏿 Person Bouncing Ball: Dark Skin Tone ⛹️‍♂️ Man Bouncing Ball ⛹🏻‍♂️ Man Bouncing Ball: Light Skin 
Tone ⛹🏼‍♂️ Man Bouncing Ball: Medium-Light Skin Tone ⛹🏽‍♂️ Man Bouncing Ball: Medium Skin Tone ⛹🏾‍♂️ Man Bouncing Ball: Medium-Dark Skin Tone ⛹🏿‍♂️ Man Bouncing Ball: Dark Skin Tone ⛹️‍♀️ Woman Bouncing Ball ⛹🏻‍♀️ Woman Bouncing Ball: Light Skin Tone ⛹🏼‍♀️ Woman Bouncing Ball: Medium-Light Skin Tone ⛹🏽‍♀️ Woman Bouncing Ball: Medium Skin Tone ⛹🏾‍♀️ Woman Bouncing Ball: Medium-Dark Skin Tone ⛹🏿‍♀️ Woman Bouncing Ball: Dark Skin Tone 🏋 Person Lifting Weights 🏋🏻 Person Lifting Weights: Light Skin Tone 🏋🏼 Person Lifting Weights: Medium-Light Skin Tone 🏋🏽 Person Lifting Weights: Medium Skin Tone 🏋🏾 Person Lifting Weights: Medium-Dark Skin Tone 🏋🏿 Person Lifting Weights: Dark Skin Tone 🏋️‍♂️ Man Lifting Weights 🏋🏻‍♂️ Man Lifting Weights: Light Skin Tone 🏋🏼‍♂️ Man Lifting Weights: Medium-Light Skin Tone 🏋🏽‍♂️ Man Lifting Weights: Medium Skin Tone 🏋🏾‍♂️ Man Lifting Weights: Medium-Dark Skin Tone 🏋🏿‍♂️ Man Lifting Weights: Dark Skin Tone 🏋️‍♀️ Woman Lifting Weights 🏋🏻‍♀️ Woman Lifting Weights: Light Skin Tone 🏋🏼‍♀️ Woman Lifting Weights: Medium-Light Skin Tone 🏋🏽‍♀️ Woman Lifting Weights: Medium Skin Tone 🏋🏾‍♀️ Woman Lifting Weights: Medium-Dark Skin Tone 🏋🏿‍♀️ Woman Lifting Weights: Dark Skin Tone 🚴 Person Biking 🚴🏻 Person Biking: Light Skin Tone 🚴🏼 Person Biking: Medium-Light Skin Tone 🚴🏽 Person Biking: Medium Skin Tone 🚴🏾 Person Biking: Medium-Dark Skin Tone 🚴🏿 Person Biking: Dark Skin Tone 🚴‍♂️ Man Biking 🚴🏻‍♂️ Man Biking: Light Skin Tone 🚴🏼‍♂️ Man Biking: Medium-Light Skin Tone 🚴🏽‍♂️ Man Biking: Medium Skin Tone 🚴🏾‍♂️ Man Biking: Medium-Dark Skin Tone 🚴🏿‍♂️ Man Biking: Dark Skin Tone 🚴‍♀️ Woman Biking 🚴🏻‍♀️ Woman Biking: Light Skin Tone 🚴🏼‍♀️ Woman Biking: Medium-Light Skin Tone 🚴🏽‍♀️ Woman Biking: Medium Skin Tone 🚴🏾‍♀️ Woman Biking: Medium-Dark Skin Tone 🚴🏿‍♀️ Woman Biking: Dark Skin Tone 🚵 Person Mountain Biking 🚵🏻 Person Mountain Biking: Light Skin Tone 🚵🏼 Person Mountain Biking: Medium-Light Skin Tone 🚵🏽 Person Mountain Biking: Medium Skin Tone 🚵🏾 Person Mountain 
Biking: Medium-Dark Skin Tone 🚵🏿 Person Mountain Biking: Dark Skin Tone 🚵‍♂️ Man Mountain Biking 🚵🏻‍♂️ Man Mountain Biking: Light Skin Tone 🚵🏼‍♂️ Man Mountain Biking: Medium-Light Skin Tone 🚵🏽‍♂️ Man Mountain Biking: Medium Skin Tone 🚵🏾‍♂️ Man Mountain Biking: Medium-Dark Skin Tone 🚵🏿‍♂️ Man Mountain Biking: Dark Skin Tone 🚵‍♀️ Woman Mountain Biking 🚵🏻‍♀️ Woman Mountain Biking: Light Skin Tone 🚵🏼‍♀️ Woman Mountain Biking: Medium-Light Skin Tone 🚵🏽‍♀️ Woman Mountain Biking: Medium Skin Tone 🚵🏾‍♀️ Woman Mountain Biking: Medium-Dark Skin Tone 🚵🏿‍♀️ Woman Mountain Biking: Dark Skin Tone 🏎 Racing Car 🏍 Motorcycle 🤸 Person Cartwheeling 🤸🏻 Person Cartwheeling: Light Skin Tone 🤸🏼 Person Cartwheeling: Medium-Light Skin Tone 🤸🏽 Person Cartwheeling: Medium Skin Tone 🤸🏾 Person Cartwheeling: Medium-Dark Skin Tone 🤸🏿 Person Cartwheeling: Dark Skin Tone 🤸‍♂️ Man Cartwheeling 🤸🏻‍♂️ Man Cartwheeling: Light Skin Tone 🤸🏼‍♂️ Man Cartwheeling: Medium-Light Skin Tone 🤸🏽‍♂️ Man Cartwheeling: Medium Skin Tone 🤸🏾‍♂️ Man Cartwheeling: Medium-Dark Skin Tone 🤸🏿‍♂️ Man Cartwheeling: Dark Skin Tone 🤸‍♀️ Woman Cartwheeling 🤸🏻‍♀️ Woman Cartwheeling: Light Skin Tone 🤸🏼‍♀️ Woman Cartwheeling: Medium-Light Skin Tone 🤸🏽‍♀️ Woman Cartwheeling: Medium Skin Tone 🤸🏾‍♀️ Woman Cartwheeling: Medium-Dark Skin Tone 🤸🏿‍♀️ Woman Cartwheeling: Dark Skin Tone 🤼 People Wrestling 🤼‍♂️ Men Wrestling 🤼‍♀️ Women Wrestling 🤽 Person Playing Water Polo 🤽🏻 Person Playing Water Polo: Light Skin Tone 🤽🏼 Person Playing Water Polo: Medium-Light Skin Tone 🤽🏽 Person Playing Water Polo: Medium Skin Tone 🤽🏾 Person Playing Water Polo: Medium-Dark Skin Tone 🤽🏿 Person Playing Water Polo: Dark Skin Tone 🤽‍♂️ Man Playing Water Polo 🤽🏻‍♂️ Man Playing Water Polo: Light Skin Tone 🤽🏼‍♂️ Man Playing Water Polo: Medium-Light Skin Tone 🤽🏽‍♂️ Man Playing Water Polo: Medium Skin Tone 🤽🏾‍♂️ Man Playing Water Polo: Medium-Dark Skin Tone 🤽🏿‍♂️ Man Playing Water Polo: Dark Skin Tone 🤽‍♀️ Woman Playing Water Polo 🤽🏻‍♀️ Woman Playing Water Polo: 
Light Skin Tone 🤽🏼‍♀️ Woman Playing Water Polo: Medium-Light Skin Tone 🤽🏽‍♀️ Woman Playing Water Polo: Medium Skin Tone 🤽🏾‍♀️ Woman Playing Water Polo: Medium-Dark Skin Tone 🤽🏿‍♀️ Woman Playing Water Polo: Dark Skin Tone 🤾 Person Playing Handball 🤾🏻 Person Playing Handball: Light Skin Tone 🤾🏼 Person Playing Handball: Medium-Light Skin Tone 🤾🏽 Person Playing Handball: Medium Skin Tone 🤾🏾 Person Playing Handball: Medium-Dark Skin Tone 🤾🏿 Person Playing Handball: Dark Skin Tone 🤾‍♂️ Man Playing Handball 🤾🏻‍♂️ Man Playing Handball: Light Skin Tone 🤾🏼‍♂️ Man Playing Handball: Medium-Light Skin Tone 🤾🏽‍♂️ Man Playing Handball: Medium Skin Tone 🤾🏾‍♂️ Man Playing Handball: Medium-Dark Skin Tone 🤾🏿‍♂️ Man Playing Handball: Dark Skin Tone 🤾‍♀️ Woman Playing Handball 🤾🏻‍♀️ Woman Playing Handball: Light Skin Tone 🤾🏼‍♀️ Woman Playing Handball: Medium-Light Skin Tone 🤾🏽‍♀️ Woman Playing Handball: Medium Skin Tone 🤾🏾‍♀️ Woman Playing Handball: Medium-Dark Skin Tone 🤾🏿‍♀️ Woman Playing Handball: Dark Skin Tone 🤹 Person Juggling 🤹🏻 Person Juggling: Light Skin Tone 🤹🏼 Person Juggling: Medium-Light Skin Tone 🤹🏽 Person Juggling: Medium Skin Tone 🤹🏾 Person Juggling: Medium-Dark Skin Tone 🤹🏿 Person Juggling: Dark Skin Tone 🤹‍♂️ Man Juggling 🤹🏻‍♂️ Man Juggling: Light Skin Tone 🤹🏼‍♂️ Man Juggling: Medium-Light Skin Tone 🤹🏽‍♂️ Man Juggling: Medium Skin Tone 🤹🏾‍♂️ Man Juggling: Medium-Dark Skin Tone 🤹🏿‍♂️ Man Juggling: Dark Skin Tone 🤹‍♀️ Woman Juggling 🤹🏻‍♀️ Woman Juggling: Light Skin Tone 🤹🏼‍♀️ Woman Juggling: Medium-Light Skin Tone 🤹🏽‍♀️ Woman Juggling: Medium Skin Tone 🤹🏾‍♀️ Woman Juggling: Medium-Dark Skin Tone 🤹🏿‍♀️ Woman Juggling: Dark Skin Tone 🤼🏻 Wrestlers, Type-1-2 🤼🏼 Wrestlers, Type-3 👫 Man and Woman Holding Hands 🤼🏽 Wrestlers, Type-4 👬 Two Men Holding Hands 🤼🏾 Wrestlers, Type-5 👭 Two Women Holding Hands 🤼🏿 Wrestlers, Type-6 💏 Kiss 👩‍❤️‍💋‍👨 Kiss: Woman, Man 🤼🏻‍♂️ Men Wrestling, Type-1-2 🤼🏼‍♂️ Men Wrestling, Type-3 🤼🏽‍♂️ Men Wrestling, Type-4 👨‍❤️‍💋‍👨 Kiss: Man, Man 🤼🏾‍♂️ Men 
Wrestling, Type-5 🤼🏿‍♂️ Men Wrestling, Type-6 👩‍❤️‍💋‍👩 Kiss: Woman, Woman 🤼🏻‍♀️ Women Wrestling, Type-1-2 💑 Couple With Heart 🤼🏼‍♀️ Women Wrestling, Type-3 👩‍❤️‍👨 Couple With Heart: Woman, Man 🤼🏽‍♀️ Women Wrestling, Type-4 🤼🏾‍♀️ Women Wrestling, Type-5 👨‍❤️‍👨 Couple With Heart: Man, Man 🤼🏿‍♀️ Women Wrestling, Type-6 👩‍❤️‍👩 Couple With Heart: Woman, Woman 👪 Family 👨‍👩‍👦 Family: Man, Woman, Boy 👨‍👩‍👧 Family: Man, Woman, Girl 👨‍👩‍👧‍👦 Family: Man, Woman, Girl, Boy 👨‍👩‍👦‍👦 Family: Man, Woman, Boy, Boy 👨‍👩‍👧‍👧 Family: Man, Woman, Girl, Girl 👨‍👨‍👦 Family: Man, Man, Boy 👨‍👨‍👧 Family: Man, Man, Girl 👨‍👨‍👧‍👦 Family: Man, Man, Girl, Boy 👨‍👨‍👦‍👦 Family: Man, Man, Boy, Boy 👨‍👨‍👧‍👧 Family: Man, Man, Girl, Girl 👩‍👩‍👦 Family: Woman, Woman, Boy 👩‍👩‍👧 Family: Woman, Woman, Girl 👩‍👩‍👧‍👦 Family: Woman, Woman, Girl, Boy 👩‍👩‍👦‍👦 Family: Woman, Woman, Boy, Boy 👩‍👩‍👧‍👧 Family: Woman, Woman, Girl, Girl 👨‍👦 Family: Man, Boy 👨‍👦‍👦 Family: Man, Boy, Boy 👨‍👧 Family: Man, Girl 👨‍👧‍👦 Family: Man, Girl, Boy 👨‍👧‍👧 Family: Man, Girl, Girl 👩‍👦 Family: Woman, Boy 👩‍👦‍👦 Family: Woman, Boy, Boy 👩‍👧 Family: Woman, Girl 👩‍👧‍👦 Family: Woman, Girl, Boy 👩‍👧‍👧 Family: Woman, Girl, Girl 🤳 Selfie 🤳🏻 Selfie: Light Skin Tone 🤳🏼 Selfie: Medium-Light Skin Tone 🤳🏽 Selfie: Medium Skin Tone 🤳🏾 Selfie: Medium-Dark Skin Tone 🤳🏿 Selfie: Dark Skin Tone 💪 Flexed Biceps 💪🏻 Flexed Biceps: Light Skin Tone 💪🏼 Flexed Biceps: Medium-Light Skin Tone 💪🏽 Flexed Biceps: Medium Skin Tone 💪🏾 Flexed Biceps: Medium-Dark Skin Tone 💪🏿 Flexed Biceps: Dark Skin Tone 👈 Backhand Index Pointing Left 👈🏻 Backhand Index Pointing Left: Light Skin Tone 👈🏼 Backhand Index Pointing Left: Medium-Light Skin Tone 👈🏽 Backhand Index Pointing Left: Medium Skin Tone 👈🏾 Backhand Index Pointing Left: Medium-Dark Skin Tone 👈🏿 Backhand Index Pointing Left: Dark Skin Tone 👉 Backhand Index Pointing Right 👉🏻 Backhand Index Pointing Right: Light Skin Tone 👉🏼 Backhand Index Pointing Right: Medium-Light Skin Tone 👉🏽 Backhand Index Pointing Right: Medium Skin Tone 👉🏾 
Backhand Index Pointing Right: Medium-Dark Skin Tone 👉🏿 Backhand Index Pointing Right: Dark Skin Tone ☝ Index Pointing Up ☝🏻 Index Pointing Up: Light Skin Tone ☝🏼 Index Pointing Up: Medium-Light Skin Tone ☝🏽 Index Pointing Up: Medium Skin Tone ☝🏾 Index Pointing Up: Medium-Dark Skin Tone ☝🏿 Index Pointing Up: Dark Skin Tone 👆 Backhand Index Pointing Up 👆🏻 Backhand Index Pointing Up: Light Skin Tone 👆🏼 Backhand Index Pointing Up: Medium-Light Skin Tone 👆🏽 Backhand Index Pointing Up: Medium Skin Tone 👆🏾 Backhand Index Pointing Up: Medium-Dark Skin Tone 👆🏿 Backhand Index Pointing Up: Dark Skin Tone 🖕 Middle Finger 🖕🏻 Middle Finger: Light Skin Tone 🖕🏼 Middle Finger: Medium-Light Skin Tone 🖕🏽 Middle Finger: Medium Skin Tone 🖕🏾 Middle Finger: Medium-Dark Skin Tone 🖕🏿 Middle Finger: Dark Skin Tone 👇 Backhand Index Pointing Down 👇🏻 Backhand Index Pointing Down: Light Skin Tone 👇🏼 Backhand Index Pointing Down: Medium-Light Skin Tone 👇🏽 Backhand Index Pointing Down: Medium Skin Tone 👇🏾 Backhand Index Pointing Down: Medium-Dark Skin Tone 👇🏿 Backhand Index Pointing Down: Dark Skin Tone ✌ Victory Hand ✌🏻 Victory Hand: Light Skin Tone ✌🏼 Victory Hand: Medium-Light Skin Tone ✌🏽 Victory Hand: Medium Skin Tone ✌🏾 Victory Hand: Medium-Dark Skin Tone ✌🏿 Victory Hand: Dark Skin Tone 🤞 Crossed Fingers 🤞🏻 Crossed Fingers: Light Skin Tone 🤞🏼 Crossed Fingers: Medium-Light Skin Tone 🤞🏽 Crossed Fingers: Medium Skin Tone 🤞🏾 Crossed Fingers: Medium-Dark Skin Tone 🤞🏿 Crossed Fingers: Dark Skin Tone 🖖 Vulcan Salute 🖖🏻 Vulcan Salute: Light Skin Tone 🖖🏼 Vulcan Salute: Medium-Light Skin Tone 🖖🏽 Vulcan Salute: Medium Skin Tone 🖖🏾 Vulcan Salute: Medium-Dark Skin Tone 🖖🏿 Vulcan Salute: Dark Skin Tone 🤘 Sign of the Horns 🤘🏻 Sign of the Horns: Light Skin Tone 🤘🏼 Sign of the Horns: Medium-Light Skin Tone 🤘🏽 Sign of the Horns: Medium Skin Tone 🤘🏾 Sign of the Horns: Medium-Dark Skin Tone 🤘🏿 Sign of the Horns: Dark Skin Tone 🤙 Call Me Hand 🤙🏻 Call Me Hand: Light Skin Tone 🤙🏼 Call Me Hand: Medium-Light Skin 
Tone 🤙🏽 Call Me Hand: Medium Skin Tone 🤙🏾 Call Me Hand: Medium-Dark Skin Tone 🤙🏿 Call Me Hand: Dark Skin Tone 🖐 Raised Hand With Fingers Splayed 🖐🏻 Raised Hand With Fingers Splayed: Light Skin Tone 🖐🏼 Raised Hand With Fingers Splayed: Medium-Light Skin Tone 🖐🏽 Raised Hand With Fingers Splayed: Medium Skin Tone 🖐🏾 Raised Hand With Fingers Splayed: Medium-Dark Skin Tone 🖐🏿 Raised Hand With Fingers Splayed: Dark Skin Tone ✋ Raised Hand ✋🏻 Raised Hand: Light Skin Tone ✋🏼 Raised Hand: Medium-Light Skin Tone ✋🏽 Raised Hand: Medium Skin Tone ✋🏾 Raised Hand: Medium-Dark Skin Tone ✋🏿 Raised Hand: Dark Skin Tone 👌 OK Hand 👌🏻 OK Hand: Light Skin Tone 👌🏼 OK Hand: Medium-Light Skin Tone 👌🏽 OK Hand: Medium Skin Tone 👌🏾 OK Hand: Medium-Dark Skin Tone 👌🏿 OK Hand: Dark Skin Tone 👍 Thumbs Up 👍🏻 Thumbs Up: Light Skin Tone 👍🏼 Thumbs Up: Medium-Light Skin Tone 👍🏽 Thumbs Up: Medium Skin Tone 👍🏾 Thumbs Up: Medium-Dark Skin Tone 👍🏿 Thumbs Up: Dark Skin Tone 👎 Thumbs Down 👎🏻 Thumbs Down: Light Skin Tone 👎🏼 Thumbs Down: Medium-Light Skin Tone 👎🏽 Thumbs Down: Medium Skin Tone 👎🏾 Thumbs Down: Medium-Dark Skin Tone 👎🏿 Thumbs Down: Dark Skin Tone ✊ Raised Fist ✊🏻 Raised Fist: Light Skin Tone ✊🏼 Raised Fist: Medium-Light Skin Tone ✊🏽 Raised Fist: Medium Skin Tone ✊🏾 Raised Fist: Medium-Dark Skin Tone ✊🏿 Raised Fist: Dark Skin Tone 👊 Oncoming Fist 👊🏻 Oncoming Fist: Light Skin Tone 👊🏼 Oncoming Fist: Medium-Light Skin Tone 👊🏽 Oncoming Fist: Medium Skin Tone 👊🏾 Oncoming Fist: Medium-Dark Skin Tone 👊🏿 Oncoming Fist: Dark Skin Tone 🤛 Left-Facing Fist 🤛🏻 Left-Facing Fist: Light Skin Tone 🤛🏼 Left-Facing Fist: Medium-Light Skin Tone 🤛🏽 Left-Facing Fist: Medium Skin Tone 🤛🏾 Left-Facing Fist: Medium-Dark Skin Tone 🤛🏿 Left-Facing Fist: Dark Skin Tone 🤜 Right-Facing Fist 🤜🏻 Right-Facing Fist: Light Skin Tone 🤜🏼 Right-Facing Fist: Medium-Light Skin Tone 🤜🏽 Right-Facing Fist: Medium Skin Tone 🤜🏾 Right-Facing Fist: Medium-Dark Skin Tone 🤜🏿 Right-Facing Fist: Dark Skin Tone 🤚 Raised Back of Hand 🤚🏻 Raised Back 
of Hand: Light Skin Tone 🤚🏼 Raised Back of Hand: Medium-Light Skin Tone 🤚🏽 Raised Back of Hand: Medium Skin Tone 🤚🏾 Raised Back of Hand: Medium-Dark Skin Tone 🤚🏿 Raised Back of Hand: Dark Skin Tone 👋 Waving Hand 👋🏻 Waving Hand: Light Skin Tone 👋🏼 Waving Hand: Medium-Light Skin Tone 👋🏽 Waving Hand: Medium Skin Tone 👋🏾 Waving Hand: Medium-Dark Skin Tone 👋🏿 Waving Hand: Dark Skin Tone 🤟 Love-You Gesture 🤟🏻 Love-You Gesture: Light Skin Tone 🤟🏼 Love-You Gesture: Medium-Light Skin Tone 🤟🏽 Love-You Gesture: Medium Skin Tone 🤟🏾 Love-You Gesture: Medium-Dark Skin Tone 🤟🏿 Love-You Gesture: Dark Skin Tone ✍ Writing Hand ✍🏻 Writing Hand: Light Skin Tone ✍🏼 Writing Hand: Medium-Light Skin Tone ✍🏽 Writing Hand: Medium Skin Tone ✍🏾 Writing Hand: Medium-Dark Skin Tone ✍🏿 Writing Hand: Dark Skin Tone 👏 Clapping Hands 👏🏻 Clapping Hands: Light Skin Tone 👏🏼 Clapping Hands: Medium-Light Skin Tone 👏🏽 Clapping Hands: Medium Skin Tone 👏🏾 Clapping Hands: Medium-Dark Skin Tone 👏🏿 Clapping Hands: Dark Skin Tone 👐 Open Hands 👐🏻 Open Hands: Light Skin Tone 👐🏼 Open Hands: Medium-Light Skin Tone 👐🏽 Open Hands: Medium Skin Tone 👐🏾 Open Hands: Medium-Dark Skin Tone 👐🏿 Open Hands: Dark Skin Tone 🙌 Raising Hands 🙌🏻 Raising Hands: Light Skin Tone 🙌🏼 Raising Hands: Medium-Light Skin Tone 🙌🏽 Raising Hands: Medium Skin Tone 🙌🏾 Raising Hands: Medium-Dark Skin Tone 🙌🏿 Raising Hands: Dark Skin Tone 🤲 Palms Up Together 🤲🏻 Palms Up Together: Light Skin Tone 🤲🏼 Palms Up Together: Medium-Light Skin Tone 🤲🏽 Palms Up Together: Medium Skin Tone 🤲🏾 Palms Up Together: Medium-Dark Skin Tone 🤲🏿 Palms Up Together: Dark Skin Tone 🙏 Folded Hands 🙏🏻 Folded Hands: Light Skin Tone 🙏🏼 Folded Hands: Medium-Light Skin Tone 🙏🏽 Folded Hands: Medium Skin Tone 🙏🏾 Folded Hands: Medium-Dark Skin Tone 🙏🏿 Folded Hands: Dark Skin Tone 🤝 Handshake 💅 Nail Polish 💅🏻 Nail Polish: Light Skin Tone 💅🏼 Nail Polish: Medium-Light Skin Tone 💅🏽 Nail Polish: Medium Skin Tone 💅🏾 Nail Polish: Medium-Dark Skin Tone 💅🏿 Nail Polish: Dark Skin Tone 👂 
Ear 👂🏻 Ear: Light Skin Tone 👂🏼 Ear: Medium-Light Skin Tone 👂🏽 Ear: Medium Skin Tone 👂🏾 Ear: Medium-Dark Skin Tone 👂🏿 Ear: Dark Skin Tone 👃 Nose 👃🏻 Nose: Light Skin Tone 👃🏼 Nose: Medium-Light Skin Tone 👃🏽 Nose: Medium Skin Tone 👃🏾 Nose: Medium-Dark Skin Tone 👃🏿 Nose: Dark Skin Tone 👣 Footprints 👀 Eyes 👁 Eye 👁️‍🗨️ Eye in Speech Bubble 🧠 Brain 👅 Tongue 👄 Mouth 💋 Kiss Mark 💘 Heart With Arrow ❤ Red Heart 💓 Beating Heart 💔 Broken Heart 💕 Two Hearts 💖 Sparkling Heart 💗 Growing Heart 💙 Blue Heart 💚 Green Heart 💛 Yellow Heart 🧡 Orange Heart 💜 Purple Heart 🖤 Black Heart 💝 Heart With Ribbon 💞 Revolving Hearts 💟 Heart Decoration ❣ Heavy Heart Exclamation 💌 Love Letter 💤 Zzz 💢 Anger Symbol 💣 Bomb 💥 Collision 💦 Sweat Droplets 💨 Dashing Away 💫 Dizzy 💬 Speech Balloon 🗨 Left Speech Bubble 🗯 Right Anger Bubble 💭 Thought Balloon 🕳 Hole 👓 Glasses 🕶 Sunglasses 👔 Necktie 👕 T-Shirt 👖 Jeans 🧣 Scarf 🧤 Gloves 🧥 Coat 🧦 Socks 👗 Dress 👘 Kimono 👙 Bikini 👚 Woman’s Clothes 👛 Purse 👜 Handbag 👝 Clutch Bag 🛍 Shopping Bags 🎒 School Backpack 👞 Man’s Shoe 👟 Running Shoe 👠 High-Heeled Shoe 👡 Woman’s Sandal 👢 Woman’s Boot 👑 Crown 👒 Woman’s Hat 🎩 Top Hat 🎓 Graduation Cap 🧢 Billed Cap ⛑ Rescue Worker’s Helmet 📿 Prayer Beads 💄 Lipstick 💍 Ring 💎 Gem Stone 🐵 Monkey Face 🐒 Monkey 🦍 Gorilla 🐶 Dog Face 🐕 Dog 🐩 Poodle 🐺 Wolf Face 🦊 Fox Face 🐱 Cat Face 🐈 Cat 🦁 Lion Face 🐯 Tiger Face 🐅 Tiger 🐆 Leopard 🐴 Horse Face 🐎 Horse 🦄 Unicorn Face 🦓 Zebra 🦌 Deer 🐮 Cow Face 🐂 Ox 🐃 Water Buffalo 🐄 Cow 🐷 Pig Face 🐖 Pig 🐗 Boar 🐽 Pig Nose 🐏 Ram 🐑 Ewe 🐐 Goat 🐪 Camel 🐫 Two-Hump Camel 🦒 Giraffe 🐘 Elephant 🦏 Rhinoceros 🐭 Mouse Face 🐁 Mouse 🐀 Rat 🐹 Hamster Face 🐰 Rabbit Face 🐇 Rabbit 🐿 Chipmunk 🦔 Hedgehog 🦇 Bat 🐻 Bear Face 🐨 Koala 🐼 Panda Face 🐾 Paw Prints 🦃 Turkey 🐔 Chicken 🐓 Rooster 🐣 Hatching Chick 🐤 Baby Chick 🐥 Front-Facing Baby Chick 🐦 Bird 🐧 Penguin 🕊 Dove 🦅 Eagle 🦆 Duck 🦉 Owl 🐸 Frog Face 🐊 Crocodile 🐢 Turtle 🦎 Lizard 🐍 Snake 🐲 Dragon Face 🐉 Dragon 🦕 Sauropod 🦖 T-Rex 🐳 Spouting Whale 🐋 Whale 🐬 Dolphin 🐟 Fish 🐠 Tropical Fish 🐡 
Blowfish 🦈 Shark 🐙 Octopus 🐚 Spiral Shell 🦀 Crab 🦐 Shrimp 🦑 Squid 🐌 Snail 🦋 Butterfly 🐛 Bug 🐜 Ant 🐝 Honeybee 🐞 Lady Beetle 🦗 Cricket 🕷 Spider 🕸 Spider Web 🦂 Scorpion 💐 Bouquet 🌸 Cherry Blossom 💮 White Flower 🏵 Rosette 🌹 Rose 🥀 Wilted Flower 🌺 Hibiscus 🌻 Sunflower 🌼 Blossom 🌷 Tulip 🌱 Seedling 🌲 Evergreen Tree 🌳 Deciduous Tree 🌴 Palm Tree 🌵 Cactus 🌾 Sheaf of Rice 🌿 Herb ☘ Shamrock 🍀 Four Leaf Clover 🍁 Maple Leaf 🍂 Fallen Leaf 🍃 Leaf Fluttering in Wind 🍇 Grapes 🍈 Melon 🍉 Watermelon 🍊 Tangerine 🍋 Lemon 🍌 Banana 🍍 Pineapple 🍎 Red Apple 🍏 Green Apple 🍐 Pear 🍑 Peach 🍒 Cherries 🍓 Strawberry 🥝 Kiwi Fruit 🍅 Tomato 🥥 Coconut 🥑 Avocado 🍆 Eggplant 🥔 Potato 🥕 Carrot 🌽 Ear of Corn 🌶 Hot Pepper 🥒 Cucumber 🥦 Broccoli 🍄 Mushroom 🥜 Peanuts 🌰 Chestnut 🍞 Bread 🥐 Croissant 🥖 Baguette Bread 🥨 Pretzel 🥞 Pancakes 🧀 Cheese Wedge 🍖 Meat on Bone 🍗 Poultry Leg 🥩 Cut of Meat 🥓 Bacon 🍔 Hamburger 🍟 French Fries 🍕 Pizza 🌭 Hot Dog 🥪 Sandwich 🌮 Taco 🌯 Burrito 🥙 Stuffed Flatbread 🥚 Egg 🍳 Cooking 🥘 Shallow Pan of Food 🍲 Pot of Food 🥣 Bowl With Spoon 🥗 Green Salad 🍿 Popcorn 🥫 Canned Food 🍱 Bento Box 🍘 Rice Cracker 🍙 Rice Ball 🍚 Cooked Rice 🍛 Curry Rice 🍜 Steaming Bowl 🍝 Spaghetti 🍠 Roasted Sweet Potato 🍢 Oden 🍣 Sushi 🍤 Fried Shrimp 🍥 Fish Cake With Swirl 🍡 Dango 🥟 Dumpling 🥠 Fortune Cookie 🥡 Takeout Box 🍦 Soft Ice Cream 🍧 Shaved Ice 🍨 Ice Cream 🍩 Doughnut 🍪 Cookie 🎂 Birthday Cake 🍰 Shortcake 🥧 Pie 🍫 Chocolate Bar 🍬 Candy 🍭 Lollipop 🍮 Custard 🍯 Honey Pot 🍼 Baby Bottle 🥛 Glass of Milk ☕ Hot Beverage 🍵 Teacup Without Handle 🍶 Sake 🍾 Bottle With Popping Cork 🍷 Wine Glass 🍸 Cocktail Glass 🍹 Tropical Drink 🍺 Beer Mug 🍻 Clinking Beer Mugs 🥂 Clinking Glasses 🥃 Tumbler Glass 🥤 Cup With Straw 🥢 Chopsticks 🍽 Fork and Knife With Plate 🍴 Fork and Knife 🥄 Spoon 🔪 Kitchen Knife 🏺 Amphora 🌍 Globe Showing Europe-Africa 🌎 Globe Showing Americas 🌏 Globe Showing Asia-Australia 🌐 Globe With Meridians 🗺 World Map 🗾 Map of Japan 🏔 Snow-Capped Mountain ⛰ Mountain 🌋 Volcano 🗻 Mount Fuji 🏕 Camping 🏖 Beach With Umbrella 🏜 Desert 
🏝 Desert Island 🏞 National Park 🏟 Stadium 🏛 Classical Building 🏗 Building Construction 🏘 House 🏙 Cityscape 🏚 Derelict House 🏠 House 🏡 House With Garden 🏢 Office Building 🏣 Japanese Post Office 🏤 Post Office 🏥 Hospital 🏦 Bank 🏨 Hotel 🏩 Love Hotel 🏪 Convenience Store 🏫 School 🏬 Department Store 🏭 Factory 🏯 Japanese Castle 🏰 Castle 💒 Wedding 🗼 Tokyo Tower 🗽 Statue of Liberty ⛪ Church 🕌 Mosque 🕍 Synagogue ⛩ Shinto Shrine 🕋 Kaaba ⛲ Fountain ⛺ Tent 🌁 Foggy 🌃 Night With Stars 🌄 Sunrise Over Mountains 🌅 Sunrise 🌆 Cityscape at Dusk 🌇 Sunset 🌉 Bridge at Night ♨ Hot Springs 🌌 Milky Way 🎠 Carousel Horse 🎡 Ferris Wheel 🎢 Roller Coaster 💈 Barber Pole 🎪 Circus Tent 🎭 Performing Arts 🖼 Framed Picture 🎨 Artist Palette 🎰 Slot Machine 🚂 Locomotive 🚃 Railway Car 🚄 High-Speed Train 🚅 High-Speed Train With Bullet Nose 🚆 Train 🚇 Metro 🚈 Light Rail 🚉 Station 🚊 Tram 🚝 Monorail 🚞 Mountain Railway 🚋 Tram Car 🚌 Bus 🚍 Oncoming Bus 🚎 Trolleybus 🚐 Minibus 🚑 Ambulance 🚒 Fire Engine 🚓 Police Car 🚔 Oncoming Police Car 🚕 Taxi 🚖 Oncoming Taxi 🚗 Automobile 🚘 Oncoming Automobile 🚙 Sport Utility Vehicle 🚚 Delivery Truck 🚛 Articulated Lorry 🚜 Tractor 🚲 Bicycle 🛴 Kick Scooter 🛵 Motor Scooter 🚏 Bus Stop 🛣 Motorway 🛤 Railway Track ⛽ Fuel Pump 🚨 Police Car Light 🚥 Horizontal Traffic Light 🚦 Vertical Traffic Light 🚧 Construction 🛑 Stop Sign ⚓ Anchor ⛵ Sailboat 🛶 Canoe 🚤 Speedboat 🛳 Passenger Ship ⛴ Ferry 🛥 Motor Boat 🚢 Ship ✈ Airplane 🛩 Small Airplane 🛫 Airplane Departure 🛬 Airplane Arrival 💺 Seat 🚁 Helicopter 🚟 Suspension Railway 🚠 Mountain Cableway 🚡 Aerial Tramway 🛰 Satellite 🚀 Rocket 🛸 Flying Saucer 🛎 Bellhop Bell 🚪 Door 🛏 Bed 🛋 Couch and Lamp 🚽 Toilet 🚿 Shower 🛁 Bathtub ⌛ Hourglass ⏳ Hourglass With Flowing Sand ⌚ Watch ⏰ Alarm Clock ⏱ Stopwatch ⏲ Timer Clock 🕰 Mantelpiece Clock 🕛 Twelve O’clock 🕧 Twelve-Thirty 🕐 One O’clock 🕜 One-Thirty 🕑 Two O’clock 🕝 Two-Thirty 🕒 Three O’clock 🕞 Three-Thirty 🕓 Four O’clock 🕟 Four-Thirty 🕔 Five O’clock 🕠 Five-Thirty 🕕 Six O’clock 🕡 Six-Thirty 🕖 Seven O’clock 🕢 
Seven-Thirty 🕗 Eight O’clock 🕣 Eight-Thirty 🕘 Nine O’clock 🕤 Nine-Thirty 🕙 Ten O’clock 🕥 Ten-Thirty 🕚 Eleven O’clock 🕦 Eleven-Thirty 🌑 New Moon 🌒 Waxing Crescent Moon 🌓 First Quarter Moon 🌔 Waxing Gibbous Moon 🌕 Full Moon 🌖 Waning Gibbous Moon 🌗 Last Quarter Moon 🌘 Waning Crescent Moon 🌙 Crescent Moon 🌚 New Moon Face 🌛 First Quarter Moon With Face 🌜 Last Quarter Moon With Face 🌡 Thermometer ☀ Sun 🌝 Full Moon With Face 🌞 Sun With Face ⭐ White Medium Star 🌟 Glowing Star 🌠 Shooting Star ☁ Cloud ⛅ Sun Behind Cloud ⛈ Cloud With Lightning and Rain 🌤 Sun Behind Small Cloud 🌥 Sun Behind Large Cloud 🌦 Sun Behind Rain Cloud 🌧 Cloud With Rain 🌨 Cloud With Snow 🌩 Cloud With Lightning 🌪 Tornado 🌫 Fog 🌬 Wind Face 🌀 Cyclone 🌈 Rainbow 🌂 Closed Umbrella ☂ Umbrella ☔ Umbrella With Rain Drops ⛱ Umbrella on Ground ⚡ High Voltage ❄ Snowflake ☃ Snowman ⛄ Snowman Without Snow ☄ Comet 🔥 Fire 💧 Droplet 🌊 Water Wave 🎃 Jack-O-Lantern 🎄 Christmas Tree 🎆 Fireworks 🎇 Sparkler ✨ Sparkles 🎈 Balloon 🎉 Party Popper 🎊 Confetti Ball 🎋 Tanabata Tree 🎍 Pine Decoration 🎎 Japanese Dolls 🎏 Carp Streamer 🎐 Wind Chime 🎑 Moon Viewing Ceremony 🎀 Ribbon 🎁 Wrapped Gift 🎗 Reminder Ribbon 🎟 Admission Tickets 🎫 Ticket 🎖 Military Medal 🏆 Trophy 🏅 Sports Medal 🥇 1st Place Medal 🥈 2nd Place Medal 🥉 3rd Place Medal ⚽ Soccer Ball ⚾ Baseball 🏀 Basketball 🏐 Volleyball 🏈 American Football 🏉 Rugby Football 🎾 Tennis 🎱 Pool 8 Ball 🎳 Bowling 🏏 Cricket 🏑 Field Hockey 🏒 Ice Hockey 🏓 Ping Pong 🏸 Badminton 🥊 Boxing Glove 🥋 Martial Arts Uniform 🥅 Goal Net 🎯 Direct Hit ⛳ Flag in Hole ⛸ Ice Skate 🎣 Fishing Pole 🎽 Running Shirt 🎿 Skis 🛷 Sled 🥌 Curling Stone 🎮 Video Game 🕹 Joystick 🎲 Game Die ♠ Spade Suit ♥ Heart Suit ♦ Diamond Suit ♣ Club Suit 🃏 Joker 🀄 Mahjong Red Dragon 🎴 Flower Playing Cards 🔇 Muted Speaker 🔈 Speaker Low Volume 🔉 Speaker Medium Volume 🔊 Speaker High Volume 📢 Loudspeaker 📣 Megaphone 📯 Postal Horn 🔔 Bell 🔕 Bell With Slash 🎼 Musical Score 🎵 Musical Note 🎶 Musical Notes 🎙 Studio Microphone 🎚 Level Slider 🎛 Control 
Knobs 🎤 Microphone 🎧 Headphone 📻 Radio 🎷 Saxophone 🎸 Guitar 🎹 Musical Keyboard 🎺 Trumpet 🎻 Violin 🥁 Drum 📱 Mobile Phone 📲 Mobile Phone With Arrow ☎ Telephone 📞 Telephone Receiver 📟 Pager 📠 Fax Machine 🔋 Battery 🔌 Electric Plug 💻 Laptop Computer 🖥 Desktop Computer 🖨 Printer ⌨ Keyboard 🖱 Computer Mouse 🖲 Trackball 💽 Computer Disk 💾 Floppy Disk 💿 Optical Disk 📀 DVD 🎥 Movie Camera 🎞 Film Frames 📽 Film Projector 🎬 Clapper Board 📺 Television 📷 Camera 📸 Camera With Flash 📹 Video Camera 📼 Videocassette 🔍 Left-Pointing Magnifying Glass 🔎 Right-Pointing Magnifying Glass 🔬 Microscope 🔭 Telescope 📡 Satellite Antenna 🕯 Candle 💡 Light Bulb 🔦 Flashlight 🏮 Red Paper Lantern 📔 Notebook With Decorative Cover 📕 Closed Book 📖 Open Book 📗 Green Book 📘 Blue Book 📙 Orange Book 📚 Books 📓 Notebook 📒 Ledger 📃 Page With Curl 📜 Scroll 📄 Page Facing Up 📰 Newspaper 🗞 Rolled-Up Newspaper 📑 Bookmark Tabs 🔖 Bookmark 🏷 Label 💰 Money Bag 💴 Yen Banknote 💵 Dollar Banknote 💶 Euro Banknote 💷 Pound Banknote 💸 Money With Wings 💳 Credit Card 💹 Chart Increasing With Yen 💱 Currency Exchange 💲 Heavy Dollar Sign ✉ Envelope 📧 E-Mail 📨 Incoming Envelope 📩 Envelope With Arrow 📤 Outbox Tray 📥 Inbox Tray 📦 Package 📫 Closed Mailbox With Raised Flag 📪 Closed Mailbox With Lowered Flag 📬 Open Mailbox With Raised Flag 📭 Open Mailbox With Lowered Flag 📮 Postbox 🗳 Ballot Box With Ballot ✏ Pencil ✒ Black Nib 🖋 Fountain Pen 🖊 Pen 🖌 Paintbrush 🖍 Crayon 📝 Memo 💼 Briefcase 📁 File Folder 📂 Open File Folder 🗂 Card Index Dividers 📅 Calendar 📆 Tear-Off Calendar 🗒 Spiral Notepad 🗓 Spiral Calendar 📇 Card Index 📈 Chart Increasing 📉 Chart Decreasing 📊 Bar Chart 📋 Clipboard 📌 Pushpin 📍 Round Pushpin 📎 Paperclip 🖇 Linked Paperclips 📏 Straight Ruler 📐 Triangular Ruler ✂ Scissors 🗃 Card File Box 🗄 File Cabinet 🗑 Wastebasket 🔒 Locked 🔓 Unlocked 🔏 Locked With Pen 🔐 Locked With Key 🔑 Key 🗝 Old Key 🔨 Hammer ⛏ Pick ⚒ Hammer and Pick 🛠 Hammer and Wrench 🗡 Dagger ⚔ Crossed Swords 🔫 Pistol 🏹 Bow and Arrow 🛡 Shield 🔧 Wrench 🔩 Nut and Bolt ⚙ Gear 🗜 
Clamp ⚗ Alembic ⚖ Balance Scale 🔗 Link ⛓ Chains 💉 Syringe 💊 Pill 🚬 Cigarette ⚰ Coffin ⚱ Funeral Urn 🗿 Moai 🛢 Oil Drum 🔮 Crystal Ball 🛒 Shopping Cart 🏧 Atm Sign 🚮 Litter in Bin Sign 🚰 Potable Water ♿ Wheelchair Symbol 🚹 Men’s Room 🚺 Women’s Room 🚻 Restroom 🚼 Baby Symbol 🚾 Water Closet 🛂 Passport Control 🛃 Customs 🛄 Baggage Claim 🛅 Left Luggage ⚠ Warning 🚸 Children Crossing ⛔ No Entry 🚫 Prohibited 🚳 No Bicycles 🚭 No Smoking 🚯 No Littering 🚱 Non-Potable Water 🚷 No Pedestrians 📵 No Mobile Phones 🔞 No One Under Eighteen ☢ Radioactive ☣ Biohazard ⬆ Up Arrow ↗ Up-Right Arrow ➡ Right Arrow ↘ Down-Right Arrow ⬇ Down Arrow ↙ Down-Left Arrow ⬅ Left Arrow ↖ Up-Left Arrow ↕ Up-Down Arrow ↔ Left-Right Arrow ↩ Right Arrow Curving Left ↪ Left Arrow Curving Right ⤴ Right Arrow Curving Up ⤵ Right Arrow Curving Down 🔃 Clockwise Vertical Arrows 🔄 Anticlockwise Arrows Button 🔙 Back Arrow 🔚 End Arrow 🔛 On! Arrow 🔜 Soon Arrow 🔝 Top Arrow 🛐 Place of Worship ⚛ Atom Symbol 🕉 Om ✡ Star of David ☸ Wheel of Dharma ☯ Yin Yang ✝ Latin Cross ☦ Orthodox Cross ☪ Star and Crescent ☮ Peace Symbol 🕎 Menorah 🔯 Dotted Six-Pointed Star ♈ Aries ♉ Taurus ♊ Gemini ♋ Cancer ♌ Leo ♍ Virgo ♎ Libra ♏ Scorpius ♐ Sagittarius ♑ Capricorn ♒ Aquarius ♓ Pisces ⛎ Ophiuchus 🔀 Shuffle Tracks Button 🔁 Repeat Button 🔂 Repeat Single Button ▶ Play Button ⏩ Fast-Forward Button ⏭ Next Track Button ⏯ Play or Pause Button ◀ Reverse Button ⏪ Fast Reverse Button ⏮ Last Track Button 🔼 Up Button ⏫ Fast Up Button 🔽 Down Button ⏬ Fast Down Button ⏸ Pause Button ⏹ Stop Button ⏺ Record Button ⏏ Eject Button 🎦 Cinema 🔅 Dim Button 🔆 Bright Button 📶 Antenna Bars 📳 Vibration Mode 📴 Mobile Phone Off ♀ Female Sign ♂ Male Sign ⚕ Medical Symbol ♻ Recycling Symbol ⚜ Fleur-De-Lis 🔱 Trident Emblem 📛 Name Badge 🔰 Japanese Symbol for Beginner ⭕ Heavy Large Circle ✅ White Heavy Check Mark ☑ Ballot Box With Check ✔ Heavy Check Mark ✖ Heavy Multiplication X ❌ Cross Mark ❎ Cross Mark Button ➕ Heavy Plus Sign ➖ Heavy Minus Sign ➗ Heavy Division Sign ➰ 
Curly Loop ➿ Double Curly Loop 〽 Part Alternation Mark ✳ Eight-Spoked Asterisk ✴ Eight-Pointed Star ❇ Sparkle ‼ Double Exclamation Mark ⁉ Exclamation Question Mark ❓ Question Mark ❔ White Question Mark ❕ White Exclamation Mark ❗ Exclamation Mark 〰 Wavy Dash © Copyright ® Registered ™ Trade Mark #️⃣ Keycap Number Sign *️⃣ Keycap Asterisk 0️⃣ Keycap Digit Zero 1️⃣ Keycap Digit One 2️⃣ Keycap Digit Two 3️⃣ Keycap Digit Three 4️⃣ Keycap Digit Four 5️⃣ Keycap Digit Five 6️⃣ Keycap Digit Six 7️⃣ Keycap Digit Seven 8️⃣ Keycap Digit Eight 9️⃣ Keycap Digit Nine 🔟 Keycap 10 💯 Hundred Points 🔠 Input Latin Uppercase 🔡 Input Latin Lowercase 🔢 Input Numbers 🔣 Input Symbols 🔤 Input Latin Letters 🅰 A Button (blood Type) 🆎 Ab Button (blood Type) 🅱 B Button (blood Type) 🆑 CL Button 🆒 Cool Button 🆓 Free Button ℹ Information 🆔 ID Button Ⓜ Circled M 🆕 New Button 🆖 NG Button 🅾 O Button (blood Type) 🆗 OK Button 🅿 P Button 🆘 SOS Button 🆙 Up! Button 🆚 Vs Button 🈁 Japanese “here” Button 🈂 Japanese “service Charge” Button 🈷 Japanese “monthly Amount” Button 🈶 Japanese “not Free of Charge” Button 🈯 Japanese “reserved” Button 🉐 Japanese “bargain” Button 🈹 Japanese “discount” Button 🈚 Japanese “free of Charge” Button 🈲 Japanese “prohibited” Button 🉑 Japanese “acceptable” Button 🈸 Japanese “application” Button 🈴 Japanese “passing Grade” Button 🈳 Japanese “vacancy” Button ㊗ Japanese “congratulations” Button ㊙ Japanese “secret” Button 🈺 Japanese “open for Business” Button 🈵 Japanese “no Vacancy” Button ▪ Black Small Square ▫ White Small Square ◻ White Medium Square ◼ Black Medium Square ◽ White Medium-Small Square ◾ Black Medium-Small Square ⬛ Black Large Square ⬜ White Large Square 🔶 Large Orange Diamond 🔷 Large Blue Diamond 🔸 Small Orange Diamond 🔹 Small Blue Diamond 🔺 Red Triangle Pointed Up 🔻 Red Triangle Pointed Down 💠 Diamond With a Dot 🔘 Radio Button 🔲 Black Square Button 🔳 White Square Button ⚪ White Circle ⚫ Black Circle 🔴 Red Circle 🔵 Blue Circle 🏁 Chequered Flag 🚩 Triangular Flag 🎌 
Crossed Flags 🏴 Black Flag 🏳 White Flag 🏳️‍🌈 Rainbow Flag 🇦🇨 Ascension Island 🇦🇩 Andorra 🇦🇪 United Arab Emirates 🇦🇫 Afghanistan 🇦🇬 Antigua & Barbuda 🇦🇮 Anguilla 🇦🇱 Albania 🇦🇲 Armenia 🇦🇴 Angola 🇦🇶 Antarctica 🇦🇷 Argentina 🇦🇸 American Samoa 🇦🇹 Austria 🇦🇺 Australia 🇦🇼 Aruba 🇦🇽 Åland Islands 🇦🇿 Azerbaijan 🇧🇦 Bosnia & Herzegovina 🇧🇧 Barbados 🇧🇩 Bangladesh 🇧🇪 Belgium 🇧🇫 Burkina Faso 🇧🇬 Bulgaria 🇧🇭 Bahrain 🇧🇮 Burundi 🇧🇯 Benin 🇧🇱 St. Barthélemy 🇧🇲 Bermuda 🇧🇳 Brunei 🇧🇴 Bolivia 🇧🇶 Caribbean Netherlands 🇧🇷 Brazil 🇧🇸 Bahamas 🇧🇹 Bhutan 🇧🇻 Bouvet Island 🇧🇼 Botswana 🇧🇾 Belarus 🇧🇿 Belize 🇨🇦 Canada 🇨🇨 Cocos (Keeling) Islands 🇨🇩 Congo - Kinshasa 🇨🇫 Central African Republic 🇨🇬 Congo - Brazzaville 🇨🇭 Switzerland 🇨🇮 Côte D’Ivoire 🇨🇰 Cook Islands 🇨🇱 Chile 🇨🇲 Cameroon 🇨🇳 China 🇨🇴 Colombia 🇨🇵 Clipperton Island 🇨🇷 Costa Rica 🇨🇺 Cuba 🇨🇻 Cape Verde 🇨🇼 Curaçao 🇨🇽 Christmas Island 🇨🇾 Cyprus 🇨🇿 Czechia 🇩🇪 Germany 🇩🇬 Diego Garcia 🇩🇯 Djibouti 🇩🇰 Denmark 🇩🇲 Dominica 🇩🇴 Dominican Republic 🇩🇿 Algeria 🇪🇦 Ceuta & Melilla 🇪🇨 Ecuador 🇪🇪 Estonia 🇪🇬 Egypt 🇪🇭 Western Sahara 🇪🇷 Eritrea 🇪🇸 Spain 🇪🇹 Ethiopia 🇪🇺 European Union 🇫🇮 Finland 🇫🇯 Fiji 🇫🇰 Falkland Islands 🇫🇲 Micronesia 🇫🇴 Faroe Islands 🇫🇷 France 🇬🇦 Gabon 🇬🇧 United Kingdom 🇬🇩 Grenada 🇬🇪 Georgia 🇬🇫 French Guiana 🇬🇬 Guernsey 🇬🇭 Ghana 🇬🇮 Gibraltar 🇬🇱 Greenland 🇬🇲 Gambia 🇬🇳 Guinea 🇬🇵 Guadeloupe 🇬🇶 Equatorial Guinea 🇬🇷 Greece 🇬🇸 South Georgia & South Sandwich Islands 🇬🇹 Guatemala 🇬🇺 Guam 🇬🇼 Guinea-Bissau 🇬🇾 Guyana 🇭🇰 Hong Kong Sar China 🇭🇲 Heard & Mcdonald Islands 🇭🇳 Honduras 🇭🇷 Croatia 🇭🇹 Haiti 🇭🇺 Hungary 🇮🇨 Canary Islands 🇮🇩 Indonesia 🇮🇪 Ireland 🇮🇱 Israel 🇮🇲 Isle of Man 🇮🇳 India 🇮🇴 British Indian Ocean Territory 🇮🇶 Iraq 🇮🇷 Iran 🇮🇸 Iceland 🇮🇹 Italy 🇯🇪 Jersey 🇯🇲 Jamaica 🇯🇴 Jordan 🇯🇵 Japan 🇰🇪 Kenya 🇰🇬 Kyrgyzstan 🇰🇭 Cambodia 🇰🇮 Kiribati 🇰🇲 Comoros 🇰🇳 St. Kitts & Nevis 🇰🇵 North Korea 🇰🇷 South Korea 🇰🇼 Kuwait 🇰🇾 Cayman Islands 🇰🇿 Kazakhstan 🇱🇦 Laos 🇱🇧 Lebanon 🇱🇨 St. 
Lucia 🇱🇮 Liechtenstein 🇱🇰 Sri Lanka 🇱🇷 Liberia 🇱🇸 Lesotho 🇱🇹 Lithuania 🇱🇺 Luxembourg 🇱🇻 Latvia 🇱🇾 Libya 🇲🇦 Morocco 🇲🇨 Monaco 🇲🇩 Moldova 🇲🇪 Montenegro 🇲🇫 St. Martin 🇲🇬 Madagascar 🇲🇭 Marshall Islands 🇲🇰 Macedonia 🇲🇱 Mali 🇲🇲 Myanmar (Burma) 🇲🇳 Mongolia 🇲🇴 Macau Sar China 🇲🇵 Northern Mariana Islands 🇲🇶 Martinique 🇲🇷 Mauritania 🇲🇸 Montserrat 🇲🇹 Malta 🇲🇺 Mauritius 🇲🇻 Maldives 🇲🇼 Malawi 🇲🇽 Mexico 🇲🇾 Malaysia 🇲🇿 Mozambique 🇳🇦 Namibia 🇳🇨 New Caledonia 🇳🇪 Niger 🇳🇫 Norfolk Island 🇳🇬 Nigeria 🇳🇮 Nicaragua 🇳🇱 Netherlands 🇳🇴 Norway 🇳🇵 Nepal 🇳🇷 Nauru 🇳🇺 Niue 🇳🇿 New Zealand 🇴🇲 Oman 🇵🇦 Panama 🇵🇪 Peru 🇵🇫 French Polynesia 🇵🇬 Papua New Guinea 🇵🇭 Philippines 🇵🇰 Pakistan 🇵🇱 Poland 🇵🇲 St. Pierre & Miquelon 🇵🇳 Pitcairn Islands 🇵🇷 Puerto Rico 🇵🇸 Palestinian Territories 🇵🇹 Portugal 🇵🇼 Palau 🇵🇾 Paraguay 🇶🇦 Qatar 🇷🇪 Réunion 🇷🇴 Romania 🇷🇸 Serbia 🇷🇺 Russia 🇷🇼 Rwanda 🇸🇦 Saudi Arabia 🇸🇧 Solomon Islands 🇸🇨 Seychelles 🇸🇩 Sudan 🇸🇪 Sweden 🇸🇬 Singapore 🇸🇭 St. Helena 🇸🇮 Slovenia 🇸🇯 Svalbard & Jan Mayen 🇸🇰 Slovakia 🇸🇱 Sierra Leone 🇸🇲 San Marino 🇸🇳 Senegal 🇸🇴 Somalia 🇸🇷 Suriname 🇸🇸 South Sudan 🇸🇹 São Tomé & Príncipe 🇸🇻 El Salvador 🇸🇽 Sint Maarten 🇸🇾 Syria 🇸🇿 Swaziland 🇹🇦 Tristan Da Cunha 🇹🇨 Turks & Caicos Islands 🇹🇩 Chad 🇹🇫 French Southern Territories 🇹🇬 Togo 🇹🇭 Thailand 🇹🇯 Tajikistan 🇹🇰 Tokelau 🇹🇱 Timor-Leste 🇹🇲 Turkmenistan 🇹🇳 Tunisia 🇹🇴 Tonga 🇹🇷 Turkey 🇹🇹 Trinidad & Tobago 🇹🇻 Tuvalu 🇹🇼 Taiwan 🇹🇿 Tanzania 🇺🇦 Ukraine 🇺🇬 Uganda 🇺🇲 U.S. Outlying Islands 🇺🇳 United Nations 🇺🇸 United States 🇺🇾 Uruguay 🇺🇿 Uzbekistan 🇻🇦 Vatican City 🇻🇨 St. Vincent & Grenadines 🇻🇪 Venezuela 🇻🇬 British Virgin Islands 🇻🇮 U.S. 
Virgin Islands 🇻🇳 Vietnam 🇻🇺 Vanuatu 🇼🇫 Wallis & Futuna 🇼🇸 Samoa 🇽🇰 Kosovo 🇾🇪 Yemen 🇾🇹 Mayotte 🇿🇦 South Africa 🇿🇲 Zambia 🇿🇼 Zimbabwe 🏴󠁧󠁢󠁥󠁮󠁧󠁿 Flag for England (GB-ENG) 🏴󠁧󠁢󠁳󠁣󠁴󠁿 Flag for Scotland (GB-SCT) 🏴󠁧󠁢󠁷󠁬󠁳󠁿 Flag for Wales (GB-WLS) 🥆 Rifle 🤻 Modern Pentathlon 🏴‍☠️ Pirate Flag 🇦 Regional Indicator Symbol Letter A 🇧 Regional Indicator Symbol Letter B 🇨 Regional Indicator Symbol Letter C 🇩 Regional Indicator Symbol Letter D 🇪 Regional Indicator Symbol Letter E 🇫 Regional Indicator Symbol Letter F 🇬 Regional Indicator Symbol Letter G 🇭 Regional Indicator Symbol Letter H 🇮 Regional Indicator Symbol Letter I 🇯 Regional Indicator Symbol Letter J 🇰 Regional Indicator Symbol Letter K 🇱 Regional Indicator Symbol Letter L 🇲 Regional Indicator Symbol Letter M 🇳 Regional Indicator Symbol Letter N 🇴 Regional Indicator Symbol Letter O 🇵 Regional Indicator Symbol Letter P 🇶 Regional Indicator Symbol Letter Q 🇷 Regional Indicator Symbol Letter R 🇸 Regional Indicator Symbol Letter S 🇹 Regional Indicator Symbol Letter T 🇺 Regional Indicator Symbol Letter U 🇻 Regional Indicator Symbol Letter V 🇼 Regional Indicator Symbol Letter W 🇽 Regional Indicator Symbol Letter X 🇾 Regional Indicator Symbol Letter Y 🇿 Regional Indicator Symbol Letter Z 🐱‍🐉 Dino Cat 🐱‍🚀 Astro Cat 🐱‍👤 Ninja Cat 🐱‍💻 Hacker Cat 🐱‍🏍 Stunt Cat 🐱‍👓 Hipster Cat ◯‍◯‍◯‍◯‍◯ Olympic Rings 🏴󠁮󠁲󠀰󠀵󠁿 Flag for Baiti (NR-05) 🏴󠁮󠁯󠀱󠀷󠁿 Flag for Nord-Trøndelag (NO-17) 🏴󠁮󠁯󠀱󠀲󠁿 Flag for Hordaland (NO-12) 🏴󠁮󠁯󠀰󠀲󠁿 Flag for Akershus (NO-02) 🏴󠁮󠁯󠀱󠀶󠁿 Flag for Sør-Trøndelag (NO-16) 🏴󠁮󠁯󠀰󠀸󠁿 Flag for Telemark (NO-08) 🏴󠁮󠁬󠁵󠁴󠁿 Flag for Utrecht (NL-UT) 🏴󠁮󠁯󠀱󠀵󠁿 Flag for Møre og Romsdal (NO-15) 🏴󠁮󠁯󠀲󠀱󠁿 Flag for Svalbard (NO-21) 🏴󠁮󠁰󠀴󠁿 Flag for Purwanchal (NP-4) 🏴󠁮󠁰󠀱󠁿 Flag for Central (NP-1) 🏴󠁮󠁯󠀰󠀳󠁿 Flag for Oslo (NO-03) 🏴󠁮󠁲󠀰󠀶󠁿 Flag for Boe (NR-06) 👨🏾‍👨🏾‍👦🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁮󠁬󠁮󠁢󠁿 Flag for North Brabant (NL-NB) 🏴󠁮󠁯󠀰󠀹󠁿 Flag for Aust-Agder (NO-09) 🏴󠁮󠁲󠀰󠀲󠁿 
Flag for Anabar (NR-02) 🏴󠁮󠁬󠁬󠁩󠁿 Flag for Limburg (NL-LI) 🏴󠁮󠁯󠀰󠀶󠁿 Flag for Buskerud (NO-06) 🏴󠁮󠁯󠀰󠀴󠁿 Flag for Hedmark (NO-04) 🏴󠁮󠁯󠀰󠀷󠁿 Flag for Vestfold (NO-07) 🏴󠁮󠁲󠀰󠀴󠁿 Flag for Anibare (NR-04) 🏴󠁮󠁯󠀲󠀰󠁿 Flag for Finnmark (NO-20) 🏴󠁮󠁬󠁯󠁶󠁿 Flag for Overijssel (NL-OV) 🏴󠁮󠁯󠀱󠀱󠁿 Flag for Rogaland (NO-11) 🏴󠁮󠁯󠀰󠀱󠁿 Flag for Østfold (NO-01) 🏴󠁮󠁲󠀰󠀱󠁿 Flag for Aiwo (NR-01) 🏴󠁮󠁬󠁺󠁥󠁿 Flag for Zeeland (NL-ZE) 🏴󠁮󠁲󠀰󠀷󠁿 Flag for Buada (NR-07) 🏴󠁮󠁯󠀱󠀹󠁿 Flag for Troms (NO-19) 🏴󠁮󠁯󠀰󠀵󠁿 Flag for Oppland (NO-05) 🏴󠁮󠁰󠀲󠁿 Flag for Madhya Pashchimanchal (NP-2) 🏴󠁮󠁲󠀰󠀳󠁿 Flag for Anetan (NR-03) 🏴󠁮󠁰󠀳󠁿 Flag for Western (NP-3) 🏴󠁮󠁯󠀲󠀲󠁿 Flag for Jan Mayen (NO-22) 🏴󠁮󠁯󠀱󠀸󠁿 Flag for Nordland (NO-18) 🏴󠁰󠁡󠀱󠁿 Flag for Bocas del Toro (PA-1) 🏴󠁰󠁡󠀳󠁿 Flag for Colón (PA-3) 🏴󠁯󠁭󠁤󠁡󠁿 Flag for Ad Dakhiliyah (OM-DA) 🏴󠁯󠁭󠁭󠁡󠁿 Flag for Muscat (OM-MA) 🏴󠁮󠁲󠀰󠀹󠁿 Flag for Ewa (NR-09) 🏴󠁮󠁺󠁴󠁫󠁩󠁿 Flag for Taranaki (NZ-TKI) 🏴󠁮󠁲󠀱󠀰󠁿 Flag for Ijuw (NR-10) 🏴󠁮󠁺󠁷󠁴󠁣󠁿 Flag for West Coast (NZ-WTC) 🏴󠁮󠁺󠁳󠁴󠁬󠁿 Flag for Southland (NZ-STL) 🏴󠁮󠁺󠁴󠁡󠁳󠁿 Flag for Tasman (NZ-TAS) 🏴󠁮󠁺󠁭󠁷󠁴󠁿 Flag for Manawatu-Wanganui (NZ-MWT) 🏴󠁮󠁺󠁷󠁫󠁯󠁿 Flag for Waikato (NZ-WKO) 🏴󠁮󠁺󠁭󠁢󠁨󠁿 Flag for Marl (NZ-MBH) 🏴󠁮󠁺󠁢󠁯󠁰󠁿 Flag for Bay of Plenty (NZ-BOP) 🏴󠁮󠁲󠀱󠀲󠁿 Flag for Nibok (NR-12) 🏴󠁯󠁭󠁢󠁵󠁿 Flag for Al Buraimi (OM-BU) 🏴󠁮󠁺󠁡󠁵󠁫󠁿 Flag for Auckland (NZ-AUK) 🏴󠁯󠁭󠁳󠁪󠁿 Flag for Janub ash Sharqiyah (OM-SJ) 🏴󠁯󠁭󠁳󠁳󠁿 Flag for Shamal ash Sharqiyah (OM-SS) 🏴󠁰󠁡󠀲󠁿 Flag for Coclé (PA-2) 🏴󠁮󠁲󠀱󠀱󠁿 Flag for Meneng (NR-11) 🏴󠁰󠁡󠀱󠀰󠁿 Flag for West Panamá (PA-10) 🏴󠁯󠁭󠁺󠁡󠁿 Flag for Ad Dhahirah (OM-ZA) 🏴󠁮󠁺󠁮󠁴󠁬󠁿 Flag for Northland (NZ-NTL) 🏴󠁮󠁺󠁣󠁡󠁮󠁿 Flag for Canterbury (NZ-CAN) 🏴󠁮󠁺󠁧󠁩󠁳󠁿 Flag for Gisborne (NZ-GIS) 🏴󠁮󠁺󠁣󠁩󠁴󠁿 Flag for Chatham Islands (NZ-CIT) 🏴󠁮󠁲󠀱󠀳󠁿 Flag for Uaboe (NR-13) 🏴󠁮󠁲󠀰󠀸󠁿 Flag for Denigomodu (NR-08) 🏴󠁯󠁭󠁭󠁵󠁿 Flag for Musandam (OM-MU) 🏴󠁯󠁭󠁢󠁳󠁿 Flag for Shamal al Batinah (OM-BS) 🏴󠁮󠁺󠁨󠁫󠁢󠁿 Flag for Hawke’s Bay (NZ-HKB) 🏴󠁮󠁺󠁯󠁴󠁡󠁿 Flag for Otago (NZ-OTA) 🏴󠁯󠁭󠁢󠁪󠁿 Flag for Janub al Batinah (OM-BJ) 🏴󠁯󠁭󠁺󠁵󠁿 Flag for Dhofar (OM-ZU) 🏴󠁰󠁡󠀵󠁿 Flag for Darién (PA-5) 🏴󠁰󠁥󠁣󠁡󠁬󠁿 Flag for El Callao (PE-CAL) 🏴󠁰󠁡󠀶󠁿 Flag for Herrera (PA-6) 🏴󠁰󠁡󠁫󠁹󠁿 Flag for Guna Yala 
(PA-KY) 🏴󠁰󠁡󠁥󠁭󠁿 Flag for Emberá (PA-EM) 🏴󠁰󠁥󠁬󠁡󠁬󠁿 Flag for La Libertad (PE-LAL) 🏴󠁰󠁡󠀹󠁿 Flag for Veraguas (PA-9) 🏴󠁰󠁥󠁬󠁯󠁲󠁿 Flag for Loreto (PE-LOR) 🏴󠁰󠁥󠁡󠁭󠁡󠁿 Flag for Amazonas (PE-AMA) 🏴󠁰󠁡󠀴󠁿 Flag for Chiriquí (PA-4) 🏴󠁰󠁧󠁣󠁰󠁫󠁿 Flag for Chimbu (PG-CPK) 🏴󠁰󠁧󠁥󠁨󠁧󠁿 Flag for Eastern Highlands (PG-EHG) 🏴󠁰󠁥󠁳󠁡󠁭󠁿 Flag for San Martín (PE-SAM) 🏴󠁰󠁥󠁪󠁵󠁮󠁿 Flag for Junín (PE-JUN) 🏴󠁰󠁥󠁨󠁵󠁣󠁿 Flag for Huánuco (PE-HUC) 🏴󠁰󠁥󠁰󠁡󠁳󠁿 Flag for Pasco (PE-PAS) 🏴󠁰󠁡󠁮󠁢󠁿 Flag for Ngöbe-Buglé (PA-NB) 🏴󠁰󠁥󠁣󠁡󠁪󠁿 Flag for Cajamarca (PE-CAJ) 🏴󠁰󠁥󠁩󠁣󠁡󠁿 Flag for Ica (PE-ICA) 🏴󠁰󠁥󠁬󠁩󠁭󠁿 Flag for Lima Region (PE-LIM) 🏴󠁰󠁥󠁭󠁯󠁱󠁿 Flag for Moquegua (PE-MOQ) 🏴󠁰󠁥󠁰󠁵󠁮󠁿 Flag for Puno (PE-PUN) 🏴󠁰󠁥󠁵󠁣󠁡󠁿 Flag for Ucayali (PE-UCA) 🏴󠁰󠁥󠁬󠁭󠁡󠁿 Flag for Lima (PE-LMA) 🏴󠁰󠁥󠁰󠁩󠁵󠁿 Flag for Piura (PE-PIU) 🏴󠁰󠁥󠁴󠁵󠁭󠁿 Flag for Tumbes (PE-TUM) 🏴󠁰󠁥󠁣󠁵󠁳󠁿 Flag for Cusco (PE-CUS) 🏴󠁰󠁡󠀸󠁿 Flag for Panamá (PA-8) 🏴󠁰󠁥󠁴󠁡󠁣󠁿 Flag for Tacna (PE-TAC) 🏴󠁰󠁧󠁣󠁰󠁭󠁿 Flag for Central (PG-CPM) 🏴󠁰󠁡󠀷󠁿 Flag for Los Santos (PA-7) 🏴󠁰󠁥󠁬󠁡󠁭󠁿 Flag for Lambayeque (PE-LAM) 🏴󠁰󠁥󠁨󠁵󠁶󠁿 Flag for Huancavelica (PE-HUV) 🏴󠁰󠁥󠁡󠁮󠁣󠁿 Flag for Ancash (PE-ANC) 🏴󠁰󠁧󠁨󠁬󠁡󠁿 Flag for Hela (PG-HLA) 🏴󠁰󠁧󠁮󠁣󠁤󠁿 Flag for Port Moresby (PG-NCD) 🏴󠁰󠁫󠁩󠁳󠁿 Flag for Islamabad (PK-IS) 🏴󠁰󠁨󠀰󠀰󠁿 Flag for Metro Manila (PH-00) 🏴󠁰󠁨󠀰󠀵󠁿 Flag for Bicol (PH-05) 🏴󠁰󠁧󠁧󠁰󠁫󠁿 Flag for Gulf (PG-GPK) 🏴󠁰󠁨󠀰󠀹󠁿 Flag for Zamboanga Peninsula (PH-09) 🏴󠁰󠁧󠁮󠁳󠁢󠁿 Flag for Bougainville (PG-NSB) 🏴󠁰󠁫󠁧󠁢󠁿 Flag for Gilgit-Baltistan (PK-GB) 🏴󠁰󠁧󠁭󠁰󠁭󠁿 Flag for Madang (PG-MPM) 🏴󠁦󠁪󠁷󠁿 Flag for Western (FJ-W) 🏴󠁰󠁨󠀱󠀲󠁿 Flag for Soccsksargen (PH-12) 🏴󠁰󠁨󠀰󠀸󠁿 Flag for Eastern Visayas (PH-08) 🏴󠁰󠁧󠁥󠁰󠁷󠁿 Flag for Enga (PG-EPW) 🏴󠁰󠁧󠁭󠁢󠁡󠁿 Flag for Milne Bay (PG-MBA) 🏴󠁰󠁨󠀴󠀰󠁿 Flag for Calabarzon (PH-40) 🏴󠁰󠁧󠁪󠁷󠁫󠁿 Flag for Jiwaka (PG-JWK) 🏴󠁰󠁨󠀰󠀲󠁿 Flag for Cagayan Valley (PH-02) 👨🏿‍👨🏿‍👦🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁰󠁧󠁭󠁰󠁬󠁿 Flag for Morobe (PG-MPL) 🏴󠁰󠁨󠀱󠀰󠁿 Flag for Northern Mindanao (PH-10) 🏴󠁰󠁨󠀴󠀱󠁿 Flag for Mimaropa (PH-41) 🏴󠁰󠁫󠁢󠁡󠁿 Flag for Balochistan (PK-BA) 🏴󠁰󠁨󠀱󠀳󠁿 Flag for Caraga (PH-13) 🏴󠁰󠁧󠁥󠁳󠁷󠁿 Flag for East Sepik (PG-ESW) 🏴󠁰󠁨󠀰󠀶󠁿 Flag for 
Western Visayas (PH-06) 🏴󠁰󠁨󠀰󠀳󠁿 Flag for Central Luzon (PH-03) 🏴󠁰󠁨󠀱󠀴󠁿 Flag for Muslim Mindanao (PH-14) 🏴󠁰󠁧󠁳󠁨󠁭󠁿 Flag for Southern Highlands (PG-SHM) 🏴󠁰󠁧󠁷󠁰󠁤󠁿 Flag for Western (PG-WPD) 🏴󠁰󠁧󠁳󠁡󠁮󠁿 Flag for Sandaun (PG-SAN) 🏴󠁰󠁧󠁮󠁩󠁫󠁿 Flag for New Ireland (PG-NIK) 🏴󠁰󠁧󠁮󠁰󠁰󠁿 Flag for Oro (PG-NPP) 🏴󠁰󠁧󠁭󠁲󠁬󠁿 Flag for Manus (PG-MRL) 🏴󠁰󠁧󠁷󠁨󠁭󠁿 Flag for Western Highlands (PG-WHM) 🏴󠁰󠁨󠀱󠀱󠁿 Flag for Davao (PH-11) 🏴󠁰󠁫󠁰󠁢󠁿 Flag for Punjab (PK-PB) 🏴󠁰󠁬󠁰󠁭󠁿 Flag for Federal Capital Territory (PL-PM) 🏴󠁰󠁬󠁳󠁬󠁿 Flag for Silesia (PL-SL) 🏴󠁰󠁬󠁫󠁰󠁿 Flag for Kuyavian-Pomerania (PL-KP) 🏴󠁰󠁳󠁴󠁢󠁳󠁿 Flag for Tubas (PS-TBS) 🏴󠁰󠁳󠁲󠁢󠁨󠁿 Flag for Ramallah and al-Bireh (PS-RBH) 🏴󠁰󠁳󠁧󠁺󠁡󠁿 Flag for Gaza (PS-GZA) 🏴󠁰󠁳󠁲󠁦󠁨󠁿 Flag for Rafah (PS-RFH) 🏴󠁰󠁳󠁨󠁢󠁮󠁿 Flag for Hebron (PS-HBN) 🏴󠁰󠁬󠁰󠁤󠁿 Flag for Podlaskie (PL-PD) 🏴󠁰󠁬󠁰󠁫󠁿 Flag for Subcarpathia (PL-PK) 🏴󠁰󠁳󠁪󠁥󠁮󠁿 Flag for Jenin (PS-JEN) 🏴󠁰󠁬󠁤󠁳󠁿 Flag for Lower Silesian (PL-DS) 🏴󠁰󠁳󠁫󠁹󠁳󠁿 Flag for Khan Yunis (PS-KYS) 🏴󠁰󠁬󠁬󠁤󠁿 Flag for Łódź (PL-LD) 🏴󠁰󠁳󠁮󠁧󠁺󠁿 Flag for North Gaza (PS-NGZ) 🏴󠁰󠁬󠁺󠁰󠁿 Flag for West Pomerania (PL-ZP) 🏴󠁰󠁫󠁪󠁫󠁿 Flag for Azad Kashmir (PK-JK) 🏴󠁰󠁳󠁳󠁬󠁴󠁿 Flag for Salfit (PS-SLT) 🏴󠁰󠁬󠁭󠁺󠁿 Flag for Mazovia (PL-MZ) 🏴󠁰󠁬󠁭󠁡󠁿 Flag for Lesser Poland (PL-MA) 🏴󠁰󠁳󠁱󠁱󠁡󠁿 Flag for Qalqilya (PS-QQA) 🏴󠁰󠁴󠀰󠀱󠁿 Flag for Aveiro (PT-01) 🏴󠁰󠁬󠁷󠁰󠁿 Flag for Greater Poland (PL-WP) 🏴󠁰󠁬󠁯󠁰󠁿 Flag for Opole (PL-OP) 🏴󠁰󠁳󠁢󠁴󠁨󠁿 Flag for Bethlehem (PS-BTH) 🏴󠁰󠁫󠁫󠁰󠁿 Flag for Khyber Pakhtunkhwa (PK-KP) 🏴󠁰󠁳󠁴󠁫󠁭󠁿 Flag for Tulkarm (PS-TKM) 🏴󠁰󠁳󠁮󠁢󠁳󠁿 Flag for Nablus (PS-NBS) 🏴󠁰󠁬󠁷󠁮󠁿 Flag for Warmian-Masuria (PL-WN) 🏴󠁰󠁳󠁪󠁲󠁨󠁿 Flag for Jericho (PS-JRH) 🏴󠁰󠁫󠁳󠁤󠁿 Flag for Sindh (PK-SD) 🏴󠁰󠁬󠁬󠁵󠁿 Flag for Lublin (PL-LU) 🏴󠁰󠁳󠁪󠁥󠁭󠁿 Flag for Jerusalem (PS-JEM) 🏴󠁰󠁬󠁬󠁢󠁿 Flag for Lubusz (PL-LB) 🏴󠁰󠁬󠁳󠁫󠁿 Flag for Świętokrzyskie (PL-SK) 🏴󠁰󠁷󠀲󠀱󠀲󠁿 Flag for Melekeok (PW-212) 🏴󠁰󠁴󠀰󠀸󠁿 Flag for Faro (PT-08) 🏴󠁰󠁹󠀱󠀱󠁿 Flag for Central (PY-11) 🏴󠁰󠁴󠀰󠀷󠁿 Flag for Évora (PT-07) 🏴󠁰󠁷󠀲󠀲󠀸󠁿 Flag for Ngiwal (PW-228) 🏴󠁰󠁹󠀱󠀲󠁿 Flag for Ñeembucú (PY-12) 🏴󠁰󠁴󠀱󠀶󠁿 Flag for Viana do Castelo (PT-16) 🏴󠁰󠁴󠀱󠀱󠁿 Flag for Lisbon (PT-11) 🏴󠁰󠁹󠀱󠀵󠁿 Flag for Presidente Hayes (PY-15) 🏴󠁰󠁴󠀱󠀷󠁿 Flag for Vila Real 
(PT-17) 🏴󠁰󠁴󠀱󠀸󠁿 Flag for Viseu (PT-18) 🏴󠁰󠁷󠀰󠀰󠀴󠁿 Flag for Airai (PW-004) 🏴󠁰󠁹󠀱󠀳󠁿 Flag for Amambay (PY-13) 🏴󠁰󠁷󠀲󠀲󠀴󠁿 Flag for Ngatpang (PW-224) 🏴󠁰󠁴󠀰󠀶󠁿 Flag for Coimbra (PT-06) 🏴󠁰󠁴󠀱󠀲󠁿 Flag for Portalegre (PT-12) 🏴󠁰󠁷󠀳󠀵󠀰󠁿 Flag for Peleliu (PW-350) 🏴󠁰󠁷󠀲󠀲󠀲󠁿 Flag for Ngardmau (PW-222) 🏴󠁰󠁷󠀲󠀱󠀴󠁿 Flag for Ngaraard (PW-214) 🏴󠁰󠁹󠀱󠀴󠁿 Flag for Canindeyú (PY-14) 🏴󠁰󠁷󠀰󠀱󠀰󠁿 Flag for Angaur (PW-010) 🏴󠁰󠁷󠀳󠀷󠀰󠁿 Flag for Sonsorol (PW-370) 🏴󠁰󠁴󠀰󠀴󠁿 Flag for Bragança (PT-04) 🏴󠁰󠁴󠀰󠀵󠁿 Flag for Castelo Branco (PT-05) 🏴󠁰󠁴󠀱󠀴󠁿 Flag for Santarém (PT-14) 🏴󠁰󠁴󠀰󠀳󠁿 Flag for Braga (PT-03) 🏴󠁰󠁷󠀰󠀵󠀰󠁿 Flag for Hatohobei (PW-050) 🏴󠁰󠁷󠀱󠀵󠀰󠁿 Flag for Koror (PW-150) 🏴󠁰󠁹󠀱󠀰󠁿 Flag for Alto Paraná (PY-10) 🏴󠁰󠁷󠀲󠀲󠀷󠁿 Flag for Ngeremlengui (PW-227) 🏴󠁰󠁴󠀱󠀰󠁿 Flag for Leiria (PT-10) 🏴󠁰󠁴󠀱󠀳󠁿 Flag for Porto (PT-13) 🏴󠁰󠁴󠀱󠀵󠁿 Flag for Setúbal (PT-15) 🏴󠁰󠁷󠀰󠀰󠀲󠁿 Flag for Aimeliik (PW-002) 🏴󠁰󠁷󠀲󠀲󠀶󠁿 Flag for Ngchesar (PW-226) 🏴󠁰󠁴󠀰󠀹󠁿 Flag for Guarda (PT-09) 🏴󠁰󠁹󠀲󠁿 Flag for San Pedro (PY-2) 🏴󠁰󠁹󠀵󠁿 Flag for Caaguazú (PY-5) 🏴󠁰󠁹󠀴󠁿 Flag for Guairá (PY-4) 🏴󠁲󠁯󠁢󠁣󠁿 Flag for Bacău (RO-BC) 🏴󠁰󠁹󠀷󠁿 Flag for Itapúa (PY-7) 🏴󠁲󠁯󠁣󠁳󠁿 Flag for Caraș-Severin (RO-CS) 🏴󠁰󠁹󠀶󠁿 Flag for Caazapá (PY-6) 🏴󠁱󠁡󠁫󠁨󠁿 Flag for Al Khor (QA-KH) 🏴󠁲󠁯󠁣󠁶󠁿 Flag for Covasna (RO-CV) 🏴󠁲󠁯󠁡󠁢󠁿 Flag for Alba (RO-AB) 🏴󠁱󠁡󠁤󠁡󠁿 Flag for Doha (QA-DA) 🏴󠁲󠁯󠁤󠁪󠁿 Flag for Dolj (RO-DJ) 🏴󠁰󠁹󠀳󠁿 Flag for Cordillera (PY-3) 🏴󠁱󠁡󠁭󠁳󠁿 Flag for Madinat ash Shamal (QA-MS) 🏴󠁲󠁯󠁢󠁨󠁿 Flag for Bihor (RO-BH) 🏴󠁲󠁯󠁨󠁲󠁿 Flag for Harghita (RO-HR) 🏴󠁲󠁯󠁢󠁲󠁿 Flag for Brăila (RO-BR) 🏴󠁲󠁯󠁡󠁧󠁿 Flag for Argeș (RO-AG) 🏴󠁱󠁡󠁺󠁡󠁿 Flag for Al Daayen (QA-ZA) 🏴󠁲󠁯󠁢󠁮󠁿 Flag for Bistriţa-Năsăud (RO-BN) 🏴󠁲󠁯󠁣󠁬󠁿 Flag for Călărași (RO-CL) 🏴󠁰󠁹󠁡󠁳󠁵󠁿 Flag for Asunción (PY-ASU) 🏴󠁰󠁹󠀱󠁿 Flag for Concepción (PY-1) 🏴󠁲󠁯󠁢󠁴󠁿 Flag for Botoşani (RO-BT) 🏴󠁲󠁯󠁧󠁬󠁿 Flag for Galați (RO-GL) 🏴󠁲󠁯󠁧󠁲󠁿 Flag for Giurgiu (RO-GR) 🏴󠁰󠁹󠀱󠀹󠁿 Flag for Boquerón (PY-19) 🏴󠁰󠁹󠀸󠁿 Flag for Misiones (PY-8) 🏴󠁲󠁯󠁢󠁿 Flag for Bucharest (RO-B) 🏴󠁰󠁹󠀹󠁿 Flag for Paraguarí (PY-9) 🏴󠁱󠁡󠁲󠁡󠁿 Flag for Al Rayyan (QA-RA) 🏴󠁲󠁯󠁣󠁴󠁿 Flag for Constanța (RO-CT) 🏴󠁲󠁯󠁨󠁤󠁿 Flag for Hunedoara (RO-HD) 🏴󠁲󠁯󠁤󠁢󠁿 Flag for Dâmbovița (RO-DB) 🏴󠁲󠁯󠁡󠁲󠁿 Flag 
for Arad (RO-AR) 🏴󠁲󠁯󠁣󠁪󠁿 Flag for Cluj (RO-CJ) 🏴󠁲󠁯󠁢󠁺󠁿 Flag for Buzău (RO-BZ) 🏴󠁱󠁡󠁷󠁡󠁿 Flag for Al Wakrah (QA-WA) 🏴󠁲󠁯󠁶󠁬󠁿 Flag for Vâlcea (RO-VL) 🏴󠁲󠁯󠁩󠁳󠁿 Flag for Iași (RO-IS) 🏴󠁲󠁯󠁭󠁨󠁿 Flag for Mehedinți (RO-MH) 🏴󠁲󠁳󠁫󠁭󠁿 Flag for Kosovo-Metohija (RS-KM) 🏴󠁲󠁯󠁩󠁬󠁿 Flag for Ialomița (RO-IL) 🏴󠁲󠁯󠁴󠁲󠁿 Flag for Teleorman (RO-TR) 🏴󠁲󠁳󠀱󠀲󠁿 Flag for Šumadija (RS-12) 🏴󠁲󠁳󠀲󠀰󠁿 Flag for Nišava (RS-20) 🏴󠁲󠁵󠁡󠁬󠁿 Flag for Altai (RU-AL) 🏴󠁲󠁯󠁶󠁮󠁿 Flag for Vrancea (RO-VN) 🏴󠁲󠁯󠁶󠁳󠁿 Flag for Vaslui (RO-VS) 🏴󠁲󠁯󠁩󠁦󠁿 Flag for Ilfov (RO-IF) 🏴󠁲󠁳󠀰󠀸󠁿 Flag for Mačva (RS-08) 🏴󠁲󠁳󠀰󠀹󠁿 Flag for Kolubara (RS-09) 🏴󠁲󠁯󠁰󠁨󠁿 Flag for Prahova (RO-PH) 🏴󠁲󠁳󠀱󠀱󠁿 Flag for Braničevo (RS-11) 🏴󠁲󠁳󠀰󠀰󠁿 Flag for Beograd (RS-00) 🏴󠁲󠁳󠀱󠀵󠁿 Flag for Zaječar (RS-15) 🏴󠁲󠁳󠀱󠀷󠁿 Flag for Moravica (RS-17) 🏴󠁲󠁳󠀱󠀳󠁿 Flag for Pomoravlje (RS-13) 🏴󠁲󠁯󠁯󠁴󠁿 Flag for Olt (RO-OT) 🏴󠁲󠁯󠁳󠁭󠁿 Flag for Satu Mare (RO-SM) 🏴󠁲󠁳󠀲󠀱󠁿 Flag for Toplica (RS-21) 🏴󠁲󠁯󠁳󠁪󠁿 Flag for Sălaj (RO-SJ) 🏴󠁲󠁯󠁭󠁳󠁿 Flag for Mureş (RO-MS) 🏴󠁲󠁳󠀲󠀲󠁿 Flag for Pirot (RS-22) 🏴󠁲󠁳󠀱󠀹󠁿 Flag for Rasina (RS-19) 🏴󠁲󠁳󠀲󠀴󠁿 Flag for Pčinja (RS-24) 🏴󠁲󠁯󠁭󠁭󠁿 Flag for Maramureş (RO-MM) 🏴󠁲󠁯󠁳󠁶󠁿 Flag for Suceava (RO-SV) 🏴󠁲󠁳󠀱󠀸󠁿 Flag for Raška (RS-18) 🏴󠁲󠁳󠀱󠀴󠁿 Flag for Bor (RS-14) 🏴󠁲󠁳󠀱󠀰󠁿 Flag for Podunavlje (RS-10) 🏴󠁲󠁯󠁮󠁴󠁿 Flag for Neamţ (RO-NT) 🏴󠁲󠁳󠀱󠀶󠁿 Flag for Zlatibor (RS-16) 🏴󠁲󠁳󠁶󠁯󠁿 Flag for Vojvodina (RS-VO) 🏴󠁲󠁳󠀲󠀳󠁿 Flag for Jablanica (RS-23) 🏴󠁲󠁯󠁴󠁬󠁿 Flag for Tulcea (RO-TL) 🏴󠁲󠁵󠁡󠁤󠁿 Flag for Adygea (RU-AD) 🏴󠁲󠁯󠁴󠁭󠁿 Flag for Timiș (RO-TM) 👩🏼‍👦🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁲󠁵󠁫󠁣󠁿 Flag for Karachay-Cherkess (RU-KC) 🏴󠁲󠁵󠁫󠁫󠁿 Flag for Khakassia (RU-KK) 🏴󠁲󠁵󠁢󠁵󠁿 Flag for Buryat (RU-BU) 🏴󠁲󠁵󠁫󠁬󠁿 Flag for Kalmykia (RU-KL) 🏴󠁲󠁵󠁢󠁥󠁬󠁿 Flag for Belgorod (RU-BEL) 🏴󠁲󠁵󠁫󠁨󠁭󠁿 Flag for Khanty-Mansi (RU-KHM) 🏴󠁲󠁵󠁬󠁥󠁮󠁿 Flag for Leningrad (RU-LEN) 🏴󠁲󠁵󠁫󠁧󠁮󠁿 Flag for Kurgan (RU-KGN) 🏴󠁲󠁵󠁩󠁶󠁡󠁿 Flag for Ivanovo (RU-IVA) 🏴󠁲󠁵󠁩󠁮󠁿 Flag for Ingushetia (RU-IN) 🏴󠁲󠁵󠁫󠁩󠁲󠁿 Flag for Kirov (RU-KIR) 🏴󠁲󠁵󠁫󠁤󠁡󠁿 Flag for Krasnodar Krai (RU-KDA) 🏴󠁲󠁵󠁫󠁲󠁿 Flag for Karelia (RU-KR) 🏴󠁲󠁵󠁭󠁡󠁧󠁿 Flag for Magadan (RU-MAG) 🏴󠁲󠁵󠁫󠁹󠁡󠁿 Flag 
for Krasnoyarsk Krai (RU-KYA) 🏴󠁲󠁵󠁫󠁥󠁭󠁿 Flag for Kemerovo (RU-KEM) 🏴󠁲󠁵󠁡󠁳󠁴󠁿 Flag for Astrakhan (RU-AST) 🏴󠁲󠁵󠁡󠁭󠁵󠁿 Flag for Amur (RU-AMU) 🏴󠁲󠁵󠁭󠁯󠁿 Flag for Mordovia (RU-MO) 🏴󠁲󠁵󠁫󠁯󠁿 Flag for Komi (RU-KO) 🏴󠁲󠁵󠁣󠁨󠁥󠁿 Flag for Chelyabinsk (RU-CHE) 🏴󠁲󠁵󠁫󠁨󠁡󠁿 Flag for Khabarovsk Krai (RU-KHA) 🏴󠁲󠁵󠁫󠁲󠁳󠁿 Flag for Kursk (RU-KRS) 🏴󠁲󠁵󠁭󠁥󠁿 Flag for Mari El (RU-ME) 🏴󠁲󠁵󠁣󠁨󠁵󠁿 Flag for Chukotka Okrug (RU-CHU) 🏴󠁲󠁵󠁫󠁧󠁤󠁿 Flag for Kaliningrad (RU-KGD) 🏴󠁲󠁵󠁩󠁲󠁫󠁿 Flag for Irkutsk (RU-IRK) 🏴󠁲󠁵󠁫󠁬󠁵󠁿 Flag for Kaluga (RU-KLU) 🏴󠁲󠁵󠁫󠁢󠁿 Flag for Kabardino-Balkar (RU-KB) 🏴󠁲󠁵󠁬󠁩󠁰󠁿 Flag for Lipetsk (RU-LIP) 🏴󠁲󠁵󠁢󠁡󠁿 Flag for Bashkortostan (RU-BA) 🏴󠁲󠁵󠁣󠁵󠁿 Flag for Chuvash (RU-CU) 🏴󠁲󠁵󠁫󠁡󠁭󠁿 Flag for Kamchatka Krai (RU-KAM) 🏴󠁲󠁵󠁫󠁯󠁳󠁿 Flag for Kostroma (RU-KOS) 🏴󠁲󠁵󠁳󠁡󠁫󠁿 Flag for Sakhalin (RU-SAK) 🏴󠁲󠁵󠁴󠁶󠁥󠁿 Flag for Tver (RU-TVE) 🏴󠁲󠁵󠁮󠁶󠁳󠁿 Flag for Novosibirsk (RU-NVS) 🏴󠁲󠁵󠁶󠁬󠁡󠁿 Flag for Vladimir (RU-VLA) 🏴󠁲󠁵󠁯󠁲󠁬󠁿 Flag for Oryol (RU-ORL) 🏴󠁲󠁵󠁳󠁴󠁡󠁿 Flag for Stavropol Krai (RU-STA) 🏴󠁲󠁵󠁮󠁩󠁺󠁿 Flag for Nizhny Novgorod (RU-NIZ) 🏴󠁲󠁵󠁳󠁡󠁲󠁿 Flag for Saratov (RU-SAR) 🏴󠁲󠁵󠁯󠁲󠁥󠁿 Flag for Orenburg (RU-ORE) 🏴󠁲󠁵󠁮󠁥󠁮󠁿 Flag for Nenets (RU-NEN) 🏴󠁲󠁵󠁶󠁧󠁧󠁿 Flag for Volgograd (RU-VGG) 🏴󠁲󠁵󠁴󠁯󠁭󠁿 Flag for Tomsk (RU-TOM) 🏴󠁲󠁵󠁳󠁶󠁥󠁿 Flag for Sverdlovsk (RU-SVE) 🏴󠁲󠁵󠁳󠁰󠁥󠁿 Flag for Saint Petersburg (RU-SPE) 🏴󠁲󠁵󠁹󠁡󠁮󠁿 Flag for Yamalo-Nenets Okrug (RU-YAN) 🏴󠁲󠁵󠁳󠁡󠁿 Flag for Sakha (RU-SA) 🏴󠁲󠁵󠁭󠁯󠁷󠁿 Flag for Moscow (RU-MOW) 🏴󠁲󠁵󠁰󠁮󠁺󠁿 Flag for Penza (RU-PNZ) 🏴󠁲󠁵󠁳󠁭󠁯󠁿 Flag for Smolensk (RU-SMO) 🏴󠁲󠁵󠁴󠁡󠁿 Flag for Tatarstan (RU-TA) 🏴󠁲󠁵󠁶󠁬󠁧󠁿 Flag for Vologda (RU-VLG) 🏴󠁲󠁵󠁴󠁵󠁬󠁿 Flag for Tula (RU-TUL) 🏴󠁲󠁵󠁹󠁡󠁲󠁿 Flag for Yaroslavl (RU-YAR) 🏴󠁲󠁵󠁴󠁹󠁵󠁿 Flag for Tyumen (RU-TYU) 🏴󠁲󠁵󠁰󠁳󠁫󠁿 Flag for Pskov (RU-PSK) 🏴󠁲󠁵󠁵󠁤󠁿 Flag for Udmurt (RU-UD) 🏴󠁲󠁵󠁳󠁡󠁭󠁿 Flag for Samara (RU-SAM) 🏴󠁲󠁵󠁵󠁬󠁹󠁿 Flag for Ulyanovsk (RU-ULY) 🏴󠁲󠁵󠁲󠁹󠁡󠁿 Flag for Ryazan (RU-RYA) 🏴󠁲󠁵󠁯󠁭󠁳󠁿 Flag for Omsk (RU-OMS) 🏴󠁲󠁵󠁰󠁥󠁲󠁿 Flag for Perm Krai (RU-PER) 🏴󠁲󠁵󠁶󠁯󠁲󠁿 Flag for Voronezh (RU-VOR) 🏴󠁲󠁵󠁮󠁧󠁲󠁿 Flag for Novgorod (RU-NGR) 🏴󠁲󠁵󠁴󠁡󠁭󠁿 Flag for Tambov (RU-TAM) 🏴󠁲󠁵󠁴󠁹󠁿 Flag for Tuva (RU-TY) 🏴󠁲󠁵󠁲󠁯󠁳󠁿 Flag for Rostov (RU-ROS) 🏴󠁲󠁵󠁭󠁵󠁲󠁿 Flag for Murmansk (RU-MUR) 🏴󠁲󠁷󠀰󠀱󠁿 Flag 
for Kigali (RW-01) 🏴󠁳󠁣󠀰󠀳󠁿 Flag for Anse Etoile (SC-03) 🏴󠁳󠁢󠁩󠁳󠁿 Flag for Isabel (SB-IS) 🏴󠁳󠁣󠀰󠀲󠁿 Flag for Anse Boileau (SC-02) 🏴󠁳󠁡󠀰󠀷󠁿 Flag for Tabuk (SA-07) 🏴󠁳󠁢󠁧󠁵󠁿 Flag for Guadalcanal (SB-GU) 🏴󠁲󠁷󠀰󠀳󠁿 Flag for Northern (RW-03) 🏴󠁲󠁷󠀰󠀵󠁿 Flag for Southern (RW-05) 🏴󠁳󠁢󠁣󠁥󠁿 Flag for Central (SB-CE) 🏴󠁳󠁡󠀰󠀶󠁿 Flag for Ha’il (SA-06) 🏴󠁳󠁣󠀰󠀹󠁿 Flag for Bel Air (SC-09) 🏴󠁳󠁢󠁭󠁬󠁿 Flag for Malaita (SB-ML) 🏴󠁳󠁡󠀱󠀰󠁿 Flag for Najran (SA-10) 🏴󠁳󠁡󠀱󠀲󠁿 Flag for Al Jawf (SA-12) 🏴󠁳󠁢󠁣󠁴󠁿 Flag for Honiara (SB-CT) 🏴󠁳󠁢󠁷󠁥󠁿 Flag for Western (SB-WE) 🏴󠁳󠁡󠀰󠀸󠁿 Flag for Northern Borders (SA-08) 🏴󠁳󠁡󠀰󠀱󠁿 Flag for Riyadh (SA-01) 🏴󠁳󠁢󠁲󠁢󠁿 Flag for Rennell and Bellona (SB-RB) 🏴󠁳󠁣󠀰󠀴󠁿 Flag for Au Cap (SC-04) 🏴󠁲󠁷󠀰󠀲󠁿 Flag for Eastern (RW-02) 🏴󠁳󠁣󠀰󠀵󠁿 Flag for Anse Royale (SC-05) 🏴󠁲󠁵󠁹󠁥󠁶󠁿 Flag for Jewish (RU-YEV) 🏴󠁳󠁣󠀱󠀰󠁿 Flag for Bel Ombre (SC-10) 🏴󠁳󠁡󠀰󠀵󠁿 Flag for Al-Qassim (SA-05) 🏴󠁳󠁢󠁴󠁥󠁿 Flag for Temotu (SB-TE) 🏴󠁳󠁣󠀰󠀷󠁿 Flag for Baie Sainte Anne (SC-07) 🏴󠁳󠁢󠁣󠁨󠁿 Flag for Choiseul (SB-CH) 🏴󠁲󠁷󠀰󠀴󠁿 Flag for Western (RW-04) 🏴󠁳󠁢󠁭󠁫󠁿 Flag for Makira-Ulawa (SB-MK) 🏴󠁳󠁡󠀰󠀲󠁿 Flag for Makkah (SA-02) 🏴󠁳󠁡󠀰󠀹󠁿 Flag for Jizan (SA-09) 🏴󠁳󠁣󠀰󠀱󠁿 Flag for Anse aux Pins (SC-01) 🏴󠁳󠁡󠀰󠀴󠁿 Flag for Eastern (SA-04) 🏴󠁳󠁡󠀱󠀴󠁿 Flag for Asir (SA-14) 🏴󠁲󠁵󠁺󠁡󠁢󠁿 Flag for Zabaykalsky Krai (RU-ZAB) 🏴󠁳󠁣󠀰󠀸󠁿 Flag for Beau Vallon (SC-08) 🏴󠁳󠁡󠀰󠀳󠁿 Flag for Al Madinah (SA-03) 🏴󠁳󠁣󠀰󠀶󠁿 Flag for Baie Lazare (SC-06) 🏴󠁳󠁣󠀱󠀹󠁿 Flag for Plaisance (SC-19) 🏴󠁳󠁥󠁤󠁿 Flag for Södermanland (SE-D) 🏴󠁳󠁣󠀱󠀶󠁿 Flag for La Rivière Anglaise (SC-16) 🏴󠁳󠁣󠀲󠀲󠁿 Flag for Saint Louis (SC-22) 🏴󠁳󠁣󠀱󠀸󠁿 Flag for Mont Fleuri (SC-18) 🏴󠁳󠁤󠁮󠁯󠁿 Flag for Northern (SD-NO) 🏴󠁳󠁣󠀱󠀳󠁿 Flag for Grand’Anse Mahé (SC-13) 🏴󠁳󠁣󠀲󠀳󠁿 Flag for Takamaka (SC-23) 🏴󠁳󠁤󠁤󠁷󠁿 Flag for West Darfur (SD-DW) 🏴󠁳󠁤󠁧󠁤󠁿 Flag for Al Qadarif (SD-GD) 🏴󠁳󠁤󠁤󠁳󠁿 Flag for South Darfur (SD-DS) 🏴󠁳󠁤󠁮󠁲󠁿 Flag for River Nile (SD-NR) 🏴󠁳󠁤󠁧󠁫󠁿 Flag for West Kurdufan (SD-GK) 🏴󠁳󠁤󠁫󠁡󠁿 Flag for Kassala (SD-KA) 🏴󠁳󠁤󠁫󠁨󠁿 Flag for Khartoum (SD-KH) 🏴󠁳󠁣󠀱󠀵󠁿 Flag for La Digue (SC-15) 🏴󠁳󠁣󠀲󠀴󠁿 Flag for Les Mamelles (SC-24) 🏴󠁳󠁣󠀲󠀱󠁿 Flag for Port Glaud (SC-21) 🏴󠁳󠁥󠁡󠁣󠁿 Flag for Västerbotten (SE-AC) 🏴󠁳󠁥󠁦󠁿 Flag 
for Jönköping (SE-F) 🏴󠁳󠁥󠁡󠁢󠁿 Flag for Stockholm (SE-AB) 🏴󠁳󠁣󠀱󠀲󠁿 Flag for Glacis (SC-12) 🏴󠁳󠁣󠀲󠀰󠁿 Flag for Pointe La Rue (SC-20) 🏴󠁳󠁤󠁮󠁷󠁿 Flag for White Nile (SD-NW) 🏴󠁳󠁤󠁧󠁺󠁿 Flag for Al Jazirah (SD-GZ) 🏴󠁳󠁥󠁥󠁿 Flag for Östergötland (SE-E) 🏴󠁳󠁥󠁢󠁤󠁿 Flag for Norrbotten (SE-BD) 🏴󠁳󠁥󠁣󠁿 Flag for Uppsala (SE-C) 🏴󠁳󠁣󠀱󠀷󠁿 Flag for Mont Buxton (SC-17) 🏴󠁳󠁣󠀱󠀴󠁿 Flag for Grand’Anse Praslin (SC-14) 🏴󠁳󠁤󠁫󠁳󠁿 Flag for South Kurdufan (SD-KS) 🏴󠁳󠁣󠀱󠀱󠁿 Flag for Cascade (SC-11) 🏴󠁳󠁤󠁫󠁮󠁿 Flag for North Kurdufan (SD-KN) 🏴󠁳󠁤󠁳󠁩󠁿 Flag for Sennar (SD-SI) 🏴󠁳󠁤󠁤󠁥󠁿 Flag for East Darfur (SD-DE) 🏴󠁳󠁤󠁮󠁢󠁿 Flag for Blue Nile (SD-NB) 🏴󠁳󠁤󠁤󠁮󠁿 Flag for North Darfur (SD-DN) 🏴󠁳󠁤󠁤󠁣󠁿 Flag for Central Darfur (SD-DC) 🏴󠁳󠁥󠁵󠁿 Flag for Västmanland (SE-U) 🏴󠁳󠁥󠁳󠁿 Flag for Värmland (SE-S) 🏴󠁳󠁩󠀰󠀱󠀷󠁿 Flag for Črnomelj (SI-017) 🏴󠁳󠁥󠁹󠁿 Flag for Västernorrland (SE-Y) 🏴󠁳󠁧󠀰󠀵󠁿 Flag for South West (SG-05) 🏴󠁳󠁩󠀰󠀱󠀶󠁿 Flag for Črna na Koroškem (SI-016) 🏴󠁳󠁥󠁯󠁿 Flag for Västra Götaland (SE-O) 🏴󠁳󠁥󠁸󠁿 Flag for Gävleborg (SE-X) 🏴󠁳󠁧󠀰󠀲󠁿 Flag for North East (SG-02) 🏴󠁳󠁩󠀰󠀰󠀷󠁿 Flag for Brda (SI-007) 🏴󠁳󠁥󠁨󠁿 Flag for Kalmar (SE-H) 🏴󠁳󠁩󠀰󠀱󠀸󠁿 Flag for Destrnik (SI-018) 🏴󠁳󠁩󠀰󠀰󠀲󠁿 Flag for Beltinci (SI-002) 🏴󠁳󠁩󠀰󠀰󠀴󠁿 Flag for Bohinj (SI-004) 🏴󠁳󠁩󠀰󠀰󠀹󠁿 Flag for Brežice (SI-009) 🏴󠁳󠁧󠀰󠀳󠁿 Flag for North West (SG-03) 🏴󠁳󠁨󠁡󠁣󠁿 Flag for Ascension Island (SH-AC) 👩🏽‍👦🏽‍👶🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁳󠁩󠀰󠀱󠀲󠁿 Flag for Cerklje na Gorenjskem (SI-012) 🏴󠁳󠁩󠀰󠀱󠀳󠁿 Flag for Cerknica (SI-013) 🏴󠁳󠁩󠀰󠀰󠀶󠁿 Flag for Bovec (SI-006) 🏴󠁳󠁩󠀰󠀱󠀵󠁿 Flag for Črenšovci (SI-015) 🏴󠁳󠁥󠁧󠁿 Flag for Kronoberg (SE-G) 🏴󠁳󠁩󠀰󠀰󠀱󠁿 Flag for Ajdovščina (SI-001) 🏴󠁳󠁩󠀰󠀱󠀰󠁿 Flag for Tišina (SI-010) 🏴󠁳󠁧󠀰󠀴󠁿 Flag for South East (SG-04) 🏴󠁳󠁩󠀰󠀰󠀸󠁿 Flag for Brezovica (SI-008) 🏴󠁳󠁨󠁨󠁬󠁿 Flag for Saint Helena (SH-HL) 🏴󠁳󠁥󠁺󠁿 Flag for Jämtland (SE-Z) 🏴󠁳󠁥󠁩󠁿 Flag for Gotland (SE-I) 🏴󠁳󠁥󠁷󠁿 Flag for Dalarna (SE-W) 🏴󠁳󠁥󠁫󠁿 Flag for Blekinge (SE-K) 🏴󠁳󠁩󠀰󠀰󠀵󠁿 Flag for Borovnica (SI-005) 🏴󠁳󠁨󠁴󠁡󠁿 Flag for Tristan da Cunha (SH-TA) 🏴󠁳󠁩󠀰󠀰󠀳󠁿 Flag for Bled (SI-003) 🏴󠁳󠁩󠀰󠀱󠀴󠁿 Flag for Cerkno (SI-014) 🏴󠁳󠁥󠁴󠁿 Flag for Örebro (SE-T) 
🏴󠁳󠁩󠀰󠀲󠀳󠁿 Flag for Domžale (SI-023) 🏴󠁳󠁩󠀰󠀴󠀰󠁿 Flag for Izola (SI-040) 🏴󠁳󠁩󠀰󠀵󠀶󠁿 Flag for Kuzma (SI-056) 🏴󠁳󠁩󠀰󠀲󠀵󠁿 Flag for Dravograd (SI-025) 🏴󠁳󠁩󠀰󠀲󠀶󠁿 Flag for Duplek (SI-026) 🏴󠁳󠁩󠀰󠀴󠀱󠁿 Flag for Jesenice (SI-041) 🏴󠁳󠁩󠀰󠀲󠀸󠁿 Flag for Gorišnica (SI-028) 🏴󠁳󠁩󠀰󠀲󠀹󠁿 Flag for Gornja Radgona (SI-029) 🏴󠁳󠁩󠀰󠀲󠀰󠁿 Flag for Dobrepolje (SI-020) 🏴󠁳󠁩󠀰󠀳󠀱󠁿 Flag for Gornji Petrovci (SI-031) 🏴󠁳󠁩󠀰󠀲󠀴󠁿 Flag for Dornava (SI-024) 🏴󠁳󠁩󠀰󠀳󠀴󠁿 Flag for Hrastnik (SI-034) 🏴󠁳󠁩󠀰󠀳󠀹󠁿 Flag for Ivančna Gorica (SI-039) 🏴󠁳󠁩󠀰󠀴󠀹󠁿 Flag for Komen (SI-049) 🏴󠁳󠁩󠀰󠀵󠀱󠁿 Flag for Kozje (SI-051) 🏴󠁳󠁩󠀰󠀱󠀹󠁿 Flag for Divača (SI-019) 🏴󠁳󠁩󠀰󠀳󠀶󠁿 Flag for Idrija (SI-036) 🏴󠁳󠁩󠀰󠀴󠀵󠁿 Flag for Kidričevo (SI-045) 🏴󠁳󠁩󠀰󠀴󠀶󠁿 Flag for Kobarid (SI-046) 🏴󠁳󠁩󠀰󠀴󠀷󠁿 Flag for Kobilje (SI-047) 🏴󠁳󠁩󠀰󠀵󠀰󠁿 Flag for Koper (SI-050) 🏴󠁳󠁩󠀰󠀳󠀷󠁿 Flag for Ig (SI-037) 🏴󠁳󠁩󠀰󠀵󠀵󠁿 Flag for Kungota (SI-055) 🏴󠁳󠁩󠀰󠀳󠀲󠁿 Flag for Grosuplje (SI-032) 🏴󠁳󠁩󠀰󠀲󠀱󠁿 Flag for Dobrova–Polhov Gradec (SI-021) 🏴󠁳󠁩󠀰󠀴󠀲󠁿 Flag for Juršinci (SI-042) 🏴󠁳󠁩󠀰󠀵󠀴󠁿 Flag for Krško (SI-054) 🏴󠁳󠁩󠀰󠀳󠀳󠁿 Flag for Šalovci (SI-033) 🏴󠁳󠁩󠀰󠀵󠀳󠁿 Flag for Kranjska Gora (SI-053) 🏴󠁳󠁩󠀰󠀴󠀸󠁿 Flag for Kočevje (SI-048) 🏴󠁳󠁩󠀰󠀳󠀸󠁿 Flag for Ilirska Bistrica (SI-038) 🏴󠁳󠁩󠀰󠀴󠀳󠁿 Flag for Kamnik (SI-043) 🏴󠁳󠁩󠀰󠀳󠀵󠁿 Flag for Hrpelje–Kozina (SI-035) 🏴󠁳󠁩󠀰󠀳󠀰󠁿 Flag for Gornji Grad (SI-030) 🏴󠁳󠁩󠀰󠀴󠀴󠁿 Flag for Kanal (SI-044) 🏴󠁳󠁩󠀰󠀲󠀲󠁿 Flag for Dol pri Ljubljani (SI-022) 🏴󠁳󠁩󠀰󠀸󠀹󠁿 Flag for Pesnica (SI-089) 🏴󠁳󠁩󠀰󠀹󠀰󠁿 Flag for Piran (SI-090) 🏴󠁳󠁩󠀰󠀷󠀴󠁿 Flag for Mežica (SI-074) 🏴󠁳󠁩󠀰󠀸󠀱󠁿 Flag for Muta (SI-081) 🏴󠁳󠁩󠀰󠀶󠀲󠁿 Flag for Ljubno (SI-062) 🏴󠁳󠁩󠀰󠀸󠀷󠁿 Flag for Ormož (SI-087) 🏴󠁳󠁩󠀰󠀹󠀴󠁿 Flag for Postojna (SI-094) 🏴󠁳󠁩󠀰󠀷󠀶󠁿 Flag for Mislinja (SI-076) 👩🏾‍👦🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁳󠁩󠀰󠀶󠀹󠁿 Flag for Majšperk (SI-069) 🏴󠁳󠁩󠀰󠀷󠀲󠁿 Flag for Mengeš (SI-072) 🏴󠁳󠁩󠀰󠀷󠀳󠁿 Flag for Metlika (SI-073) 🏴󠁳󠁩󠀰󠀷󠀷󠁿 Flag for Moravče (SI-077) 🏴󠁳󠁩󠀰󠀷󠀸󠁿 Flag for Moravske Toplice (SI-078) 🏴󠁳󠁩󠀰󠀶󠀱󠁿 Flag for Ljubljana (SI-061) 🏴󠁳󠁩󠀰󠀸󠀰󠁿 Flag for Murska Sobota (SI-080) 🏴󠁳󠁩󠀰󠀸󠀲󠁿 Flag for Naklo (SI-082) 🏴󠁳󠁩󠀰󠀸󠀴󠁿 Flag for Nova Gorica (SI-084) 🏴󠁳󠁩󠀰󠀸󠀸󠁿 Flag for 
Osilnica (SI-088) 🏴󠁳󠁩󠀰󠀹󠀱󠁿 Flag for Pivka (SI-091) 🏴󠁳󠁩󠀰󠀸󠀳󠁿 Flag for Nazarje (SI-083) 🏴󠁳󠁩󠀰󠀷󠀵󠁿 Flag for Miren–Kostanjevica (SI-075) 🏴󠁳󠁩󠀰󠀶󠀴󠁿 Flag for Logatec (SI-064) 🏴󠁳󠁩󠀰󠀶󠀰󠁿 Flag for Litija (SI-060) 🏴󠁳󠁩󠀰󠀷󠀰󠁿 Flag for Maribor (SI-070) 🏴󠁳󠁩󠀰󠀶󠀳󠁿 Flag for Ljutomer (SI-063) 🏴󠁳󠁩󠀰󠀶󠀶󠁿 Flag for Loški Potok (SI-066) 🏴󠁳󠁩󠀰󠀶󠀷󠁿 Flag for Luče (SI-067) 🏴󠁳󠁩󠀰󠀹󠀲󠁿 Flag for Podčetrtek (SI-092) 🏴󠁳󠁩󠀰󠀹󠀳󠁿 Flag for Podvelka (SI-093) 🏴󠁳󠁩󠀰󠀷󠀱󠁿 Flag for Medvode (SI-071) 🏴󠁳󠁩󠀰󠀶󠀵󠁿 Flag for Loška Dolina (SI-065) 🏴󠁳󠁩󠀰󠀵󠀷󠁿 Flag for Laško (SI-057) 🏴󠁳󠁩󠀰󠀵󠀹󠁿 Flag for Lendava (SI-059) 🏴󠁳󠁩󠀰󠀷󠀹󠁿 Flag for Mozirje (SI-079) 🏴󠁳󠁩󠀰󠀶󠀸󠁿 Flag for Lukovica (SI-068) 🏴󠁳󠁩󠀱󠀳󠀱󠁿 Flag for Tržič (SI-131) 🏴󠁳󠁩󠀱󠀱󠀸󠁿 Flag for Šentilj (SI-118) 🏴󠁳󠁩󠀰󠀹󠀸󠁿 Flag for Rače–Fram (SI-098) 🏴󠁳󠁩󠀰󠀹󠀷󠁿 Flag for Puconci (SI-097) 👩🏿‍👦🏿‍👶🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁳󠁩󠀱󠀰󠀵󠁿 Flag for Rogašovci (SI-105) 🏴󠁳󠁩󠀱󠀱󠀳󠁿 Flag for Slovenska Bistrica (SI-113) 🏴󠁳󠁩󠀱󠀰󠀷󠁿 Flag for Rogatec (SI-107) 🏴󠁳󠁩󠀰󠀹󠀶󠁿 Flag for Ptuj (SI-096) 🏴󠁳󠁩󠀱󠀱󠀹󠁿 Flag for Šentjernej (SI-119) 🏴󠁳󠁩󠀱󠀱󠀱󠁿 Flag for Sežana (SI-111) 🏴󠁳󠁩󠀱󠀲󠀳󠁿 Flag for Škofljica (SI-123) 🏴󠁳󠁩󠀱󠀱󠀲󠁿 Flag for Slovenj Gradec (SI-112) 🏴󠁳󠁩󠀱󠀱󠀵󠁿 Flag for Starše (SI-115) 🏴󠁳󠁩󠀱󠀱󠀶󠁿 Flag for Sveti Jurij (SI-116) 🏴󠁳󠁩󠀱󠀳󠀰󠁿 Flag for Trebnje (SI-130) 🏴󠁳󠁩󠀱󠀱󠀰󠁿 Flag for Sevnica (SI-110) 🏴󠁳󠁩󠀰󠀹󠀹󠁿 Flag for Radeče (SI-099) 🏴󠁳󠁩󠀱󠀲󠀱󠁿 Flag for Škocjan (SI-121) 🏴󠁳󠁩󠀱󠀲󠀴󠁿 Flag for Šmarje pri Jelšah (SI-124) 🏴󠁳󠁩󠀱󠀲󠀶󠁿 Flag for Šoštanj (SI-126) 🏴󠁳󠁩󠀱󠀲󠀷󠁿 Flag for Štore (SI-127) 🏴󠁳󠁩󠀱󠀰󠀶󠁿 Flag for Rogaška Slatina (SI-106) 🏴󠁳󠁩󠀰󠀹󠀵󠁿 Flag for Preddvor (SI-095) 🏴󠁳󠁩󠀱󠀳󠀲󠁿 Flag for Turnišče (SI-132) 🏴󠁳󠁩󠀱󠀰󠀲󠁿 Flag for Radovljica (SI-102) 🏴󠁳󠁩󠀱󠀰󠀸󠁿 Flag for Ruše (SI-108) 🏴󠁳󠁩󠀱󠀱󠀴󠁿 Flag for Slovenske Konjice (SI-114) 🏴󠁳󠁩󠀱󠀲󠀰󠁿 Flag for Šentjur (SI-120) 🏴󠁳󠁩󠀱󠀲󠀸󠁿 Flag for Tolmin (SI-128) 🏴󠁳󠁩󠀱󠀰󠀴󠁿 Flag for Ribnica (SI-104) 🏴󠁳󠁩󠀱󠀰󠀱󠁿 Flag for Radlje ob Dravi (SI-101) 🏴󠁳󠁩󠀱󠀲󠀹󠁿 Flag for Trbovlje (SI-129) 🏴󠁳󠁩󠀱󠀰󠀹󠁿 Flag for Semič (SI-109) 🏴󠁳󠁩󠀱󠀱󠀷󠁿 Flag for Šenčur (SI-117) 🏴󠁳󠁩󠀱󠀰󠀳󠁿 Flag for Ravne na Koroškem (SI-103) 🏴󠁳󠁩󠀱󠀶󠀹󠁿 Flag for Miklavž na Dravskem Polju 
(SI-169) 🏴󠁳󠁩󠀱󠀳󠀸󠁿 Flag for Vodice (SI-138) 🏴󠁳󠁩󠀱󠀳󠀳󠁿 Flag for Velenje (SI-133) 🏴󠁳󠁩󠀱󠀴󠀲󠁿 Flag for Zagorje ob Savi (SI-142) 🏴󠁳󠁩󠀱󠀴󠀱󠁿 Flag for Vuzenica (SI-141) 🏴󠁳󠁩󠀱󠀴󠀰󠁿 Flag for Vrhnika (SI-140) 👩🏻‍👧🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone 🏴󠁳󠁩󠀱󠀴󠀶󠁿 Flag for Železniki (SI-146) 🏴󠁳󠁩󠀱󠀴󠀷󠁿 Flag for Žiri (SI-147) 🏴󠁳󠁩󠀱󠀴󠀸󠁿 Flag for Benedikt (SI-148) 🏴󠁳󠁩󠀱󠀳󠀴󠁿 Flag for Velike Lašče (SI-134) 🏴󠁳󠁩󠀱󠀳󠀷󠁿 Flag for Vitanje (SI-137) 🏴󠁳󠁩󠀱󠀶󠀴󠁿 Flag for Komenda (SI-164) 🏴󠁳󠁩󠀱󠀵󠀵󠁿 Flag for Dobrna (SI-155) 🏴󠁳󠁩󠀱󠀵󠀶󠁿 Flag for Dobrovnik (SI-156) 🏴󠁳󠁩󠀱󠀵󠀷󠁿 Flag for Dolenjske Toplice (SI-157) 🏴󠁳󠁩󠀱󠀵󠀹󠁿 Flag for Hajdina (SI-159) 🏴󠁳󠁩󠀱󠀷󠀱󠁿 Flag for Oplotnica (SI-171) 🏴󠁳󠁩󠀱󠀳󠀵󠁿 Flag for Videm (SI-135) 🏴󠁳󠁩󠀱󠀶󠀳󠁿 Flag for Jezersko (SI-163) 🏴󠁳󠁩󠀱󠀵󠀲󠁿 Flag for Cankova (SI-152) 🏴󠁳󠁩󠀱󠀶󠀵󠁿 Flag for Kostel (SI-165) 🏴󠁳󠁩󠀱󠀶󠀶󠁿 Flag for Križevci (SI-166) 🏴󠁳󠁩󠀱󠀳󠀹󠁿 Flag for Vojnik (SI-139) 🏴󠁳󠁩󠀱󠀶󠀸󠁿 Flag for Markovci (SI-168) 🏴󠁳󠁩󠀱󠀷󠀰󠁿 Flag for Mirna Peč (SI-170) 🏴󠁳󠁩󠀱󠀳󠀶󠁿 Flag for Vipava (SI-136) 🏴󠁳󠁩󠀱󠀶󠀲󠁿 Flag for Horjul (SI-162) 🏴󠁳󠁩󠀱󠀵󠀳󠁿 Flag for Cerkvenjak (SI-153) 🏴󠁳󠁩󠀱󠀵󠀰󠁿 Flag for Bloke (SI-150) 🏴󠁳󠁩󠀱󠀴󠀳󠁿 Flag for Zavrč (SI-143) 🏴󠁳󠁩󠀱󠀴󠀹󠁿 Flag for Bistrica ob Sotli (SI-149) 🏴󠁳󠁩󠀱󠀴󠀴󠁿 Flag for Zreče (SI-144) 🏴󠁳󠁩󠀱󠀶󠀱󠁿 Flag for Hodoš (SI-161) 🏴󠁳󠁩󠀱󠀶󠀰󠁿 Flag for Hoče–Slivnica (SI-160) 🏴󠁳󠁩󠀱󠀵󠀸󠁿 Flag for Grad (SI-158) 🏴󠁳󠁩󠀱󠀷󠀲󠁿 Flag for Podlehnik (SI-172) 🏴󠁳󠁩󠀱󠀹󠀶󠁿 Flag for Cirkulane (SI-196) 🏴󠁳󠁩󠀱󠀷󠀴󠁿 Flag for Prebold (SI-174) 🏴󠁳󠁩󠀱󠀷󠀶󠁿 Flag for Razkrižje (SI-176) 🏴󠁳󠁩󠀱󠀸󠀸󠁿 Flag for Veržej (SI-188) 🏴󠁳󠁩󠀱󠀹󠀰󠁿 Flag for Žalec (SI-190) 🏴󠁳󠁩󠀱󠀸󠀰󠁿 Flag for Solčava (SI-180) 🏴󠁳󠁩󠀱󠀸󠀱󠁿 Flag for Sveta Ana (SI-181) 🏴󠁳󠁩󠀱󠀸󠀳󠁿 Flag for Šempeter–Vrtojba (SI-183) 🏴󠁳󠁩󠀱󠀸󠀵󠁿 Flag for Trnovska Vas (SI-185) 🏴󠁳󠁩󠀱󠀷󠀹󠁿 Flag for Sodražica (SI-179) 🏴󠁳󠁩󠀱󠀹󠀸󠁿 Flag for Makole (SI-198) 🏴󠁳󠁩󠀲󠀰󠀳󠁿 Flag for Straža (SI-203) 🏴󠁳󠁩󠀱󠀷󠀸󠁿 Flag for Selnica ob Dravi (SI-178) 🏴󠁳󠁩󠀱󠀹󠀳󠁿 Flag for Žužemberk (SI-193) 🏴󠁳󠁩󠀱󠀹󠀷󠁿 Flag for Kostanjevica na Krki (SI-197) 🏴󠁳󠁩󠀱󠀷󠀵󠁿 Flag for Prevalje (SI-175) 🏴󠁳󠁩󠀱󠀹󠀴󠁿 Flag for Šmartno pri Litiji (SI-194) 🏴󠁳󠁩󠀱󠀹󠀱󠁿 Flag for Žetale (SI-191) 🏴󠁳󠁩󠀱󠀸󠀹󠁿 Flag for Vransko (SI-189) 
🏴󠁳󠁩󠀲󠀰󠀱󠁿 Flag for Renče–Vogrsko (SI-201) 🏴󠁳󠁩󠀲󠀰󠀲󠁿 Flag for Središče ob Dravi (SI-202) 🏴󠁳󠁩󠀱󠀸󠀶󠁿 Flag for Trzin (SI-186) 🏴󠁳󠁩󠀲󠀰󠀴󠁿 Flag for Sveta Trojica v Slovenskih Goricah (SI-204) 🏴󠁳󠁩󠀲󠀰󠀵󠁿 Flag for Sveti Tomaž (SI-205) 🏴󠁳󠁩󠀱󠀷󠀷󠁿 Flag for Ribnica na Pohorju (SI-177) 🏴󠁳󠁩󠀲󠀰󠀷󠁿 Flag for Gorje (SI-207) 🏴󠁳󠁩󠀱󠀸󠀴󠁿 Flag for Tabor (SI-184) 🏴󠁳󠁩󠀱󠀹󠀹󠁿 Flag for Mokronog–Trebelno (SI-199) 🏴󠁳󠁩󠀱󠀷󠀳󠁿 Flag for Polzela (SI-173) 🏴󠁳󠁩󠀲󠀰󠀰󠁿 Flag for Poljčane (SI-200) 🏴󠁳󠁩󠀱󠀹󠀵󠁿 Flag for Apače (SI-195) 🏴󠁳󠁩󠀱󠀸󠀷󠁿 Flag for Velika Polana (SI-187) 🏴󠁳󠁫󠁴󠁡󠁿 Flag for Trnava (SK-TA) 🏴󠁳󠁩󠀲󠀰󠀹󠁿 Flag for Rečica ob Savinji (SI-209) 🏴󠁳󠁭󠀰󠀹󠁿 Flag for Serravalle (SM-09) 🏴󠁳󠁭󠀰󠀲󠁿 Flag for Chiesanuova (SM-02) 🏴󠁳󠁮󠁫󠁡󠁿 Flag for Kaffrine (SN-KA) 🏴󠁳󠁫󠁮󠁩󠁿 Flag for Nitra (SK-NI) 🏴󠁳󠁩󠀲󠀱󠀱󠁿 Flag for Šentrupert (SI-211) 🏴󠁳󠁭󠀰󠀶󠁿 Flag for Borgo Maggiore (SM-06) 🏴󠁳󠁫󠁫󠁩󠁿 Flag for Košice (SK-KI) 🏴󠁳󠁫󠁢󠁣󠁿 Flag for Banská Bystrica (SK-BC) 🏴󠁳󠁭󠀰󠀸󠁿 Flag for Montegiardino (SM-08) 🏴󠁳󠁮󠁤󠁫󠁿 Flag for Dakar (SN-DK) 🏴󠁳󠁫󠁰󠁶󠁿 Flag for Prešov (SK-PV) 🏴󠁳󠁩󠀲󠀱󠀲󠁿 Flag for Mirna (SI-212) 🏴󠁳󠁭󠀰󠀵󠁿 Flag for Fiorentino (SM-05) 🏴󠁳󠁮󠁴󠁨󠁿 Flag for Thiès (SN-TH) 🏴󠁳󠁩󠀲󠀱󠀳󠁿 Flag for Ankaran (SI-213) 🏴󠁳󠁮󠁴󠁣󠁿 Flag for Tambacounda (SN-TC) 🏴󠁳󠁮󠁦󠁫󠁿 Flag for Fatick (SN-FK) 🏴󠁳󠁫󠁴󠁣󠁿 Flag for Trenčín (SK-TC) 🏴󠁳󠁮󠁫󠁬󠁿 Flag for Kaolack (SN-KL) 🏴󠁳󠁭󠀰󠀴󠁿 Flag for Faetano (SM-04) 🏴󠁳󠁫󠁺󠁩󠁿 Flag for Žilina (SK-ZI) 🏴󠁳󠁬󠁳󠁿 Flag for Southern (SL-S) 🏴󠁳󠁮󠁳󠁥󠁿 Flag for Sédhiou (SN-SE) 🏴󠁳󠁫󠁢󠁬󠁿 Flag for Bratislava (SK-BL) 🏴󠁳󠁮󠁤󠁢󠁿 Flag for Diourbel (SN-DB) 🏴󠁳󠁮󠁫󠁥󠁿 Flag for Kédougou (SN-KE) 🏴󠁳󠁬󠁮󠁿 Flag for Northern (SL-N) 🏴󠁳󠁬󠁷󠁿 Flag for Western Area (SL-W) 🏴󠁳󠁮󠁭󠁴󠁿 Flag for Matam (SN-MT) 🏴󠁳󠁬󠁥󠁿 Flag for Eastern (SL-E) 🏴󠁳󠁭󠀰󠀱󠁿 Flag for Acquaviva (SM-01) 🏴󠁳󠁮󠁫󠁤󠁿 Flag for Kolda (SN-KD) 🏴󠁳󠁮󠁳󠁬󠁿 Flag for Saint-Louis (SN-SL) 🏴󠁳󠁭󠀰󠀷󠁿 Flag for San Marino (SM-07) 🏴󠁳󠁮󠁬󠁧󠁿 Flag for Louga (SN-LG) 🏴󠁳󠁭󠀰󠀳󠁿 Flag for Domagnano (SM-03) 🏴󠁳󠁳󠁥󠁥󠁿 Flag for Eastern Equatoria (SS-EE) 🏴󠁳󠁲󠁳󠁡󠁿 Flag for Saramacca (SR-SA) 👩🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁳󠁲󠁭󠁡󠁿 Flag for Marowijne (SR-MA) 🏴󠁳󠁯󠁪󠁤󠁿 Flag for Middle Juba (SO-JD) 
🏴󠁳󠁯󠁭󠁵󠁿 Flag for Mudug (SO-MU) 🏴󠁳󠁯󠁳󠁨󠁿 Flag for Lower Shebelle (SO-SH) 🏴󠁳󠁯󠁨󠁩󠁿 Flag for Hiran (SO-HI) 🏴󠁳󠁳󠁥󠁣󠁿 Flag for Central Equatoria (SS-EC) 🏴󠁳󠁮󠁺󠁧󠁿 Flag for Ziguinchor (SN-ZG) 🏴󠁳󠁲󠁣󠁲󠁿 Flag for Coronie (SR-CR) 🏴󠁳󠁯󠁳󠁤󠁿 Flag for Middle Shebelle (SO-SD) 🏴󠁳󠁳󠁮󠁵󠁿 Flag for Upper Nile (SS-NU) 🏴󠁳󠁲󠁷󠁡󠁿 Flag for Wanica (SR-WA) 🏴󠁳󠁯󠁡󠁷󠁿 Flag for Awdal (SO-AW) 🏴󠁳󠁯󠁳󠁡󠁿 Flag for Sanaag (SO-SA) 🏴󠁳󠁯󠁪󠁨󠁿 Flag for Lower Juba (SO-JH) 🏴󠁳󠁳󠁬󠁫󠁿 Flag for Lakes (SS-LK) 🏴󠁳󠁳󠁷󠁲󠁿 Flag for Warrap (SS-WR) 🏴󠁳󠁴󠁰󠁿 Flag for Príncipe (ST-P) 🏴󠁳󠁲󠁳󠁩󠁿 Flag for Sipaliwini (SR-SI) 🏴󠁳󠁳󠁢󠁷󠁿 Flag for Western Bahr el Ghazal (SS-BW) 🏴󠁳󠁳󠁥󠁷󠁿 Flag for Western Equatoria (SS-EW) 🏴󠁳󠁯󠁢󠁲󠁿 Flag for Bari (SO-BR) 🏴󠁳󠁳󠁪󠁧󠁿 Flag for Jonglei (SS-JG) 🏴󠁳󠁲󠁰󠁭󠁿 Flag for Paramaribo (SR-PM) 🏴󠁳󠁲󠁣󠁭󠁿 Flag for Commewijne (SR-CM) 🏴󠁳󠁯󠁧󠁡󠁿 Flag for Galguduud (SO-GA) 🏴󠁳󠁲󠁮󠁩󠁿 Flag for Nickerie (SR-NI) 🏴󠁳󠁲󠁰󠁲󠁿 Flag for Para (SR-PR) 🏴󠁳󠁯󠁷󠁯󠁿 Flag for Woqooyi Galbeed (SO-WO) 🏴󠁳󠁯󠁧󠁥󠁿 Flag for Gedo (SO-GE) 🏴󠁳󠁯󠁢󠁹󠁿 Flag for Bay, Somalia (SO-BY) 🏴󠁳󠁲󠁢󠁲󠁿 Flag for Brokopondo (SR-BR) 🏴󠁳󠁯󠁮󠁵󠁿 Flag for Nugal (SO-NU) 🏴󠁳󠁯󠁴󠁯󠁿 Flag for Togdheer (SO-TO) 🏴󠁳󠁯󠁢󠁫󠁿 Flag for Bakool (SO-BK) 🏴󠁳󠁯󠁳󠁯󠁿 Flag for Sool (SO-SO) 🏴󠁳󠁺󠁨󠁨󠁿 Flag for Hhohho (SZ-HH) 🏴󠁴󠁤󠁥󠁯󠁿 Flag for Ennedi-Ouest (TD-EO) 🏴󠁴󠁤󠁧󠁲󠁿 Flag for Guéra (TD-GR) 🏴󠁳󠁺󠁳󠁨󠁿 Flag for Shiselweni (SZ-SH) 🏴󠁳󠁹󠁤󠁲󠁿 Flag for Daraa (SY-DR) 🏴󠁳󠁹󠁲󠁡󠁿 Flag for Ar-Raqqah (SY-RA) 🏴󠁳󠁶󠁳󠁯󠁿 Flag for Sonsonate (SV-SO) 🏴󠁳󠁶󠁵󠁮󠁿 Flag for La Unión (SV-UN) 🏴󠁳󠁶󠁳󠁭󠁿 Flag for San Miguel (SV-SM) 🏴󠁳󠁶󠁭󠁯󠁿 Flag for Morazán (SV-MO) 🏴󠁳󠁶󠁳󠁳󠁿 Flag for San Salvador (SV-SS) 🏴󠁳󠁹󠁤󠁹󠁿 Flag for Deir ez-Zor (SY-DY) 🏴󠁳󠁶󠁣󠁡󠁿 Flag for Cabañas (SV-CA) 🏴󠁳󠁺󠁬󠁵󠁿 Flag for Lubombo (SZ-LU) 🏴󠁳󠁶󠁣󠁨󠁿 Flag for Chalatenango (SV-CH) 🏴󠁳󠁹󠁲󠁤󠁿 Flag for Rif Dimashq (SY-RD) 🏴󠁳󠁹󠁴󠁡󠁿 Flag for Tartus (SY-TA) 🏴󠁴󠁤󠁢󠁯󠁿 Flag for Borkou (TD-BO) 🏴󠁳󠁺󠁭󠁡󠁿 Flag for Manzini (SZ-MA) 🏴󠁴󠁤󠁢󠁡󠁿 Flag for Batha (TD-BA) 🏴󠁳󠁹󠁨󠁩󠁿 Flag for Homs (SY-HI) 🏴󠁴󠁤󠁥󠁥󠁿 Flag for Ennedi-Est (TD-EE) 🏴󠁴󠁤󠁢󠁧󠁿 Flag for Bahr el Gazel (TD-BG) 🏴󠁴󠁤󠁫󠁡󠁿 Flag for Kanem (TD-KA) 🏴󠁳󠁹󠁨󠁭󠁿 Flag for Hama (SY-HM) 🏴󠁳󠁹󠁬󠁡󠁿 Flag for Latakia (SY-LA) 🏴󠁳󠁹󠁩󠁤󠁿 Flag for Idlib 
(SY-ID) 🏴󠁳󠁶󠁬󠁩󠁿 Flag for La Libertad (SV-LI) 🏴󠁳󠁹󠁨󠁬󠁿 Flag for Aleppo (SY-HL) 🏴󠁳󠁶󠁡󠁨󠁿 Flag for Ahuachapán (SV-AH) 🏴󠁴󠁤󠁣󠁢󠁿 Flag for Chari-Baguirmi (TD-CB) 🏴󠁳󠁶󠁰󠁡󠁿 Flag for La Paz (SV-PA) 🏴󠁳󠁹󠁳󠁵󠁿 Flag for As-Suwayda (SY-SU) 🏴󠁳󠁹󠁤󠁩󠁿 Flag for Damascus (SY-DI) 🏴󠁳󠁹󠁱󠁵󠁿 Flag for Quneitra (SY-QU) 🏴󠁳󠁹󠁨󠁡󠁿 Flag for Al-Hasakah (SY-HA) 🏴󠁳󠁶󠁳󠁡󠁿 Flag for Santa Ana (SV-SA) 🏴󠁳󠁶󠁣󠁵󠁿 Flag for Cuscatlán (SV-CU) 🏴󠁴󠁤󠁬󠁯󠁿 Flag for Logone Occidental (TD-LO) 🏴󠁴󠁨󠀲󠀲󠁿 Flag for Chanthaburi (TH-22) 🏴󠁴󠁤󠁭󠁥󠁿 Flag for Mayo-Kebbi Est (TD-ME) 🏴󠁴󠁤󠁭󠁣󠁿 Flag for Moyen-Chari (TD-MC) 🏴󠁴󠁤󠁬󠁲󠁿 Flag for Logone Oriental (TD-LR) 🏴󠁴󠁧󠁳󠁿 Flag for Savanes (TG-S) 🏴󠁴󠁨󠀱󠀴󠁿 Flag for Phra Nakhon Si Ayutthaya (TH-14) 🏴󠁴󠁧󠁣󠁿 Flag for Centrale (TG-C) 🏴󠁴󠁨󠀲󠀷󠁿 Flag for Sa Kaeo (TH-27) 🏴󠁴󠁨󠀱󠀲󠁿 Flag for Nonthaburi (TH-12) 🏴󠁴󠁨󠀳󠀱󠁿 Flag for Buri Ram (TH-31) 🏴󠁴󠁨󠀲󠀰󠁿 Flag for Chon Buri (TH-20) 🏴󠁴󠁤󠁳󠁩󠁿 Flag for Sila (TD-SI) 🏴󠁴󠁤󠁬󠁣󠁿 Flag for Lac (TD-LC) 🏴󠁴󠁨󠀲󠀱󠁿 Flag for Rayong (TH-21) 🏴󠁴󠁨󠀲󠀵󠁿 Flag for Prachin Buri (TH-25) 🏴󠁴󠁨󠀳󠀰󠁿 Flag for Nakhon Ratchasima (TH-30) 🏴󠁴󠁧󠁫󠁿 Flag for Kara (TG-K) 🏴󠁴󠁨󠀱󠀵󠁿 Flag for Ang Thong (TH-15) 🏴󠁴󠁨󠀱󠀰󠁿 Flag for Bangkok (TH-10) 🏴󠁴󠁤󠁭󠁡󠁿 Flag for Mandoul (TD-MA) 🏴󠁴󠁨󠀱󠀳󠁿 Flag for Pathum Thani (TH-13) 🏴󠁴󠁨󠀲󠀴󠁿 Flag for Chachoengsao (TH-24) 🏴󠁴󠁨󠀱󠀷󠁿 Flag for Sing Buri (TH-17) 🏴󠁴󠁤󠁭󠁯󠁿 Flag for Mayo-Kebbi Ouest (TD-MO) 🏴󠁴󠁤󠁯󠁤󠁿 Flag for Ouaddaï (TD-OD) 🏴󠁴󠁨󠀳󠀲󠁿 Flag for Surin (TH-32) 🏴󠁴󠁨󠀲󠀶󠁿 Flag for Nakhon Nayok (TH-26) 🏴󠁴󠁤󠁳󠁡󠁿 Flag for Salamat (TD-SA) 🏴󠁴󠁤󠁴󠁡󠁿 Flag for Tandjilé (TD-TA) 🏴󠁴󠁤󠁷󠁦󠁿 Flag for Wadi Fira (TD-WF) 🏴󠁴󠁨󠀱󠀹󠁿 Flag for Saraburi (TH-19) 🏴󠁴󠁨󠀱󠀱󠁿 Flag for Samut Prakan (TH-11) 🏴󠁴󠁤󠁴󠁩󠁿 Flag for Tibesti (TD-TI) 🏴󠁴󠁧󠁰󠁿 Flag for Plateaux (TG-P) 🏴󠁴󠁤󠁮󠁤󠁿 Flag for N’Djamena (TD-ND) 🏴󠁴󠁨󠀱󠀸󠁿 Flag for Chai Nat (TH-18) 🏴󠁴󠁨󠀶󠀲󠁿 Flag for Kamphaeng Phet (TH-62) 🏴󠁴󠁨󠀷󠀲󠁿 Flag for Suphanburi (TH-72) 🏴󠁴󠁨󠀷󠀴󠁿 Flag for Samut Sakhon (TH-74) 🏴󠁴󠁨󠀶󠀷󠁿 Flag for Phetchabun (TH-67) 🏴󠁴󠁨󠀷󠀱󠁿 Flag for Kanchanaburi (TH-71) 🏴󠁴󠁨󠀵󠀴󠁿 Flag for Phrae (TH-54) 🏴󠁴󠁨󠀶󠀳󠁿 Flag for Tak (TH-63) 🏴󠁴󠁨󠀴󠀸󠁿 Flag for Nakhon Phanom (TH-48) 🏴󠁴󠁨󠀵󠀲󠁿 Flag for Lampang (TH-52) 🏴󠁴󠁨󠀵󠀸󠁿 Flag for Mae Hong Son 
(TH-58) 🏴󠁴󠁨󠀴󠀷󠁿 Flag for Sakon Nakhon (TH-47) 🏴󠁴󠁨󠀵󠀶󠁿 Flag for Phayao (TH-56) 🏴󠁴󠁨󠀴󠀱󠁿 Flag for Udon Thani (TH-41) 🏴󠁴󠁨󠀴󠀹󠁿 Flag for Mukdahan (TH-49) 🏴󠁴󠁨󠀷󠀳󠁿 Flag for Nakhon Pathom (TH-73) 🏴󠁴󠁨󠀵󠀰󠁿 Flag for Chiang Mai (TH-50) 🏴󠁴󠁨󠀴󠀰󠁿 Flag for Khon Kaen (TH-40) 🏴󠁴󠁨󠀳󠀷󠁿 Flag for Amnat Charoen (TH-37) 🏴󠁴󠁨󠀷󠀰󠁿 Flag for Ratchaburi (TH-70) 🏴󠁴󠁨󠀳󠀵󠁿 Flag for Yasothon (TH-35) 🏴󠁴󠁨󠀵󠀱󠁿 Flag for Lamphun (TH-51) 🏴󠁴󠁨󠀴󠀲󠁿 Flag for Loei (TH-42) 🏴󠁴󠁨󠀶󠀰󠁿 Flag for Nakhon Sawan (TH-60) 🏴󠁴󠁨󠀳󠀴󠁿 Flag for Ubon Ratchathani (TH-34) 🏴󠁴󠁨󠀴󠀴󠁿 Flag for Maha Sarakham (TH-44) 🏴󠁴󠁨󠀴󠀵󠁿 Flag for Roi Et (TH-45) 🏴󠁴󠁨󠀴󠀶󠁿 Flag for Kalasin (TH-46) 🏴󠁴󠁨󠀶󠀶󠁿 Flag for Phichit (TH-66) 🏴󠁴󠁨󠀵󠀵󠁿 Flag for Nan (TH-55) 🏴󠁴󠁨󠀶󠀱󠁿 Flag for Uthai Thani (TH-61) 🏴󠁴󠁨󠀳󠀸󠁿 Flag for Bueng Kan (TH-38) 🏴󠁴󠁨󠀳󠀳󠁿 Flag for Si Sa Ket (TH-33) 🏴󠁴󠁨󠀳󠀹󠁿 Flag for Nong Bua Lam Phu (TH-39) 🏴󠁴󠁨󠀵󠀳󠁿 Flag for Uttaradit (TH-53) 🏴󠁴󠁨󠀵󠀷󠁿 Flag for Chiang Rai (TH-57) 🏴󠁴󠁨󠀶󠀴󠁿 Flag for Sukhothai (TH-64) 🏴󠁴󠁨󠀴󠀳󠁿 Flag for Nong Khai (TH-43) 🏴󠁴󠁨󠀶󠀵󠁿 Flag for Phitsanulok (TH-65) 🏴󠁴󠁬󠁥󠁲󠁿 Flag for Ermera (TL-ER) 🏴󠁴󠁬󠁯󠁥󠁿 Flag for Oecusse (TL-OE) 🏴󠁴󠁬󠁬󠁩󠁿 Flag for Liquiçá (TL-LI) 🏴󠁴󠁬󠁡󠁬󠁿 Flag for Aileu (TL-AL) 🏴󠁴󠁭󠁡󠁿 Flag for Ahal (TM-A) 🏴󠁴󠁨󠀸󠀴󠁿 Flag for Surat Thani (TH-84) 🏴󠁴󠁨󠀷󠀶󠁿 Flag for Phetchaburi (TH-76) 🏴󠁴󠁬󠁢󠁯󠁿 Flag for Bobonaro (TL-BO) 🏴󠁴󠁬󠁭󠁴󠁿 Flag for Manatuto (TL-MT) 🏴󠁴󠁪󠁫󠁴󠁿 Flag for Khatlon (TJ-KT) 🏴󠁴󠁬󠁡󠁮󠁿 Flag for Ainaro (TL-AN) 🏴󠁴󠁨󠀸󠀲󠁿 Flag for Phang Nga (TH-82) 🏴󠁴󠁬󠁣󠁯󠁿 Flag for Cova Lima (TL-CO) 🏴󠁴󠁮󠀱󠀱󠁿 Flag for Tunis (TN-11) 🏴󠁴󠁨󠀸󠀵󠁿 Flag for Ranong (TH-85) 🏴󠁴󠁨󠀸󠀰󠁿 Flag for Nakhon Si Thammarat (TH-80) 🏴󠁴󠁨󠀷󠀷󠁿 Flag for Prachuap Khiri Khan (TH-77) 🏴󠁴󠁪󠁤󠁵󠁿 Flag for Dushanbe (TJ-DU) 🏴󠁴󠁨󠀹󠀵󠁿 Flag for Yala (TH-95) 🏴󠁴󠁨󠀹󠀰󠁿 Flag for Songkhla (TH-90) 🏴󠁴󠁭󠁬󠁿 Flag for Lebap (TM-L) 🏴󠁴󠁨󠀹󠀶󠁿 Flag for Narathiwat (TH-96) 🏴󠁴󠁭󠁭󠁿 Flag for Mary (TM-M) 🏴󠁴󠁬󠁭󠁦󠁿 Flag for Manufahi (TL-MF) 👨🏼‍👨🏼‍👦🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁴󠁭󠁢󠁿 Flag for Balkan (TM-B) 🏴󠁴󠁬󠁢󠁡󠁿 Flag for Baucau (TL-BA) 🏴󠁴󠁪󠁲󠁡󠁿 Flag for Nohiyahoi Tobei Jumhurí 
(TJ-RA) 🏴󠁴󠁨󠀹󠀲󠁿 Flag for Trang (TH-92) 🏴󠁴󠁪󠁳󠁵󠁿 Flag for Sughd (TJ-SU) 🏴󠁴󠁬󠁶󠁩󠁿 Flag for Viqueque (TL-VI) 🏴󠁴󠁨󠀹󠀴󠁿 Flag for Pattani (TH-94) 🏴󠁴󠁨󠀸󠀱󠁿 Flag for Krabi (TH-81) 🏴󠁴󠁬󠁤󠁩󠁿 Flag for Dili (TL-DI) 🏴󠁴󠁨󠀸󠀳󠁿 Flag for Phuket (TH-83) 🏴󠁴󠁨󠀹󠀱󠁿 Flag for Satun (TH-91) 🏴󠁴󠁨󠁳󠁿 Flag for Pattaya (TH-S) 🏴󠁴󠁭󠁤󠁿 Flag for Daşoguz (TM-D) 🏴󠁴󠁮󠀴󠀱󠁿 Flag for Kairouan (TN-41) 🏴󠁴󠁮󠀵󠀲󠁿 Flag for Monastir (TN-52) 🏴󠁴󠁲󠀰󠀹󠁿 Flag for Aydın (TR-09) 🏴󠁴󠁮󠀳󠀱󠁿 Flag for Béja (TN-31) 🏴󠁴󠁲󠀰󠀷󠁿 Flag for Antalya (TR-07) 🏴󠁴󠁮󠀲󠀱󠁿 Flag for Nabeul (TN-21) 🏴󠁴󠁮󠀵󠀳󠁿 Flag for Mahdia (TN-53) 🏴󠁴󠁯󠀰󠀲󠁿 Flag for Haʻapai (TO-02) 🏴󠁴󠁲󠀰󠀵󠁿 Flag for Amasya (TR-05) 🏴󠁴󠁲󠀱󠀳󠁿 Flag for Bitlis (TR-13) 🏴󠁴󠁮󠀱󠀲󠁿 Flag for Ariana (TN-12) 🏴󠁴󠁮󠀷󠀳󠁿 Flag for Kebili (TN-73) 🏴󠁴󠁲󠀰󠀱󠁿 Flag for Adana (TR-01) 🏴󠁴󠁯󠀰󠀱󠁿 Flag for ʻEua (TO-01) 🏴󠁴󠁲󠀱󠀲󠁿 Flag for Bingöl (TR-12) 🏴󠁴󠁮󠀸󠀳󠁿 Flag for Tataouine (TN-83) 🏴󠁴󠁲󠀰󠀸󠁿 Flag for Artvin (TR-08) 🏴󠁴󠁮󠀵󠀱󠁿 Flag for Sousse (TN-51) 🏴󠁴󠁮󠀸󠀱󠁿 Flag for Gabès (TN-81) 🏴󠁴󠁲󠀰󠀴󠁿 Flag for Ağrı (TR-04) 🏴󠁴󠁲󠀱󠀱󠁿 Flag for Bilecik (TR-11) 🏴󠁴󠁮󠀳󠀲󠁿 Flag for Jendouba (TN-32) 🏴󠁴󠁯󠀰󠀴󠁿 Flag for Tongatapu (TO-04) 🏴󠁴󠁲󠀰󠀲󠁿 Flag for Adıyaman (TR-02) 🏴󠁴󠁮󠀳󠀳󠁿 Flag for Kef (TN-33) 🏴󠁴󠁮󠀲󠀲󠁿 Flag for Zaghouan (TN-22) 🏴󠁴󠁲󠀱󠀰󠁿 Flag for Balıkesir (TR-10) 🏴󠁴󠁮󠀱󠀳󠁿 Flag for Ben Arous (TN-13) 🏴󠁴󠁯󠀰󠀳󠁿 Flag for Niuas (TO-03) 🏴󠁴󠁮󠀷󠀲󠁿 Flag for Tozeur (TN-72) 🏴󠁴󠁮󠀱󠀴󠁿 Flag for Manouba (TN-14) 🏴󠁴󠁮󠀴󠀲󠁿 Flag for Kasserine (TN-42) 🏴󠁴󠁲󠀱󠀴󠁿 Flag for Bolu (TR-14) 🏴󠁴󠁮󠀳󠀴󠁿 Flag for Siliana (TN-34) 🏴󠁴󠁯󠀰󠀵󠁿 Flag for Vavaʻu (TO-05) 🏴󠁴󠁲󠀰󠀶󠁿 Flag for Ankara (TR-06) 🏴󠁴󠁮󠀶󠀱󠁿 Flag for Sfax (TN-61) 🏴󠁴󠁮󠀴󠀳󠁿 Flag for Sidi Bouzid (TN-43) 🏴󠁴󠁮󠀸󠀲󠁿 Flag for Medenine (TN-82) 🏴󠁴󠁮󠀲󠀳󠁿 Flag for Bizerte (TN-23) 🏴󠁴󠁲󠀲󠀴󠁿 Flag for Erzincan (TR-24) 🏴󠁴󠁲󠀴󠀶󠁿 Flag for Kahramanmaraş (TR-46) 🏴󠁴󠁲󠀳󠀶󠁿 Flag for Kars (TR-36) 🏴󠁴󠁲󠀵󠀱󠁿 Flag for Niğde (TR-51) 🏴󠁴󠁲󠀳󠀸󠁿 Flag for Kayseri (TR-38) 🏴󠁴󠁲󠀴󠀱󠁿 Flag for Kocaeli (TR-41) 🏴󠁴󠁲󠀱󠀸󠁿 Flag for Çankırı (TR-18) 🏴󠁴󠁲󠀴󠀸󠁿 Flag for Muğla (TR-48) 🏴󠁴󠁲󠀴󠀲󠁿 Flag for Konya (TR-42) 🏴󠁴󠁲󠀴󠀴󠁿 Flag for Malatya (TR-44) 🏴󠁴󠁲󠀲󠀹󠁿 Flag for Gümüşhane (TR-29) 🏴󠁴󠁲󠀲󠀲󠁿 Flag for Edirne (TR-22) 🏴󠁴󠁲󠀳󠀹󠁿 Flag for Kırklareli (TR-39) 🏴󠁴󠁲󠀲󠀷󠁿 
Flag for Gaziantep (TR-27) 🏴󠁴󠁲󠀵󠀵󠁿 Flag for Samsun (TR-55) 🏴󠁴󠁲󠀲󠀱󠁿 Flag for Diyarbakır (TR-21) 🏴󠁴󠁲󠀱󠀶󠁿 Flag for Bursa (TR-16) 🏴󠁴󠁲󠀱󠀹󠁿 Flag for Çorum (TR-19) 🏴󠁴󠁲󠀵󠀲󠁿 Flag for Ordu (TR-52) 🏴󠁴󠁲󠀴󠀵󠁿 Flag for Manisa (TR-45) 🏴󠁴󠁲󠀲󠀵󠁿 Flag for Erzurum (TR-25) 🏴󠁴󠁲󠀱󠀵󠁿 Flag for Burdur (TR-15) 🏴󠁴󠁲󠀳󠀲󠁿 Flag for Isparta (TR-32) 🏴󠁴󠁲󠀳󠀴󠁿 Flag for Istanbul (TR-34) 🏴󠁴󠁲󠀳󠀰󠁿 Flag for Hakkâri (TR-30) 🏴󠁴󠁲󠀳󠀱󠁿 Flag for Hatay (TR-31) 🏴󠁴󠁲󠀴󠀹󠁿 Flag for Muş (TR-49) 🏴󠁴󠁲󠀳󠀳󠁿 Flag for Mersin (TR-33) 🏴󠁴󠁲󠀵󠀶󠁿 Flag for Siirt (TR-56) 🏴󠁴󠁲󠀵󠀰󠁿 Flag for Nevşehir (TR-50) 🏴󠁴󠁲󠀲󠀳󠁿 Flag for Elazığ (TR-23) 🏴󠁴󠁲󠀲󠀸󠁿 Flag for Giresun (TR-28) 🏴󠁴󠁲󠀲󠀰󠁿 Flag for Denizli (TR-20) 🏴󠁴󠁲󠀴󠀷󠁿 Flag for Mardin (TR-47) 🏴󠁴󠁲󠀳󠀷󠁿 Flag for Kastamonu (TR-37) 🏴󠁴󠁲󠀵󠀴󠁿 Flag for Sakarya (TR-54) 🏴󠁴󠁲󠀴󠀰󠁿 Flag for Kırşehir (TR-40) 🏴󠁴󠁲󠀱󠀷󠁿 Flag for Çanakkale (TR-17) 🏴󠁴󠁲󠀵󠀳󠁿 Flag for Rize (TR-53) 🏴󠁴󠁲󠀲󠀶󠁿 Flag for Eskişehir (TR-26) 🏴󠁴󠁲󠀶󠀵󠁿 Flag for Van (TR-65) 🏴󠁴󠁴󠁰󠁲󠁴󠁿 Flag for Princes Town (TT-PRT) 🏴󠁴󠁴󠁣󠁴󠁴󠁿 Flag for Couva-Tabaquite-Talparo (TT-CTT) 🏴󠁴󠁴󠁴󠁯󠁢󠁿 Flag for Tobago (TT-TOB) 🏴󠁴󠁲󠀶󠀳󠁿 Flag for Şanlıurfa (TR-63) 🏴󠁴󠁴󠁡󠁲󠁩󠁿 Flag for Arima (TT-ARI) 🏴󠁴󠁲󠀶󠀷󠁿 Flag for Zonguldak (TR-67) 🏴󠁴󠁴󠁳󠁩󠁰󠁿 Flag for Siparia (TT-SIP) 🏴󠁴󠁲󠀷󠀵󠁿 Flag for Ardahan (TR-75) 🏴󠁴󠁲󠀷󠀹󠁿 Flag for Kilis (TR-79) 🏴󠁴󠁴󠁰󠁯󠁳󠁿 Flag for Port of Spain (TT-POS) 🏴󠁴󠁲󠀶󠀸󠁿 Flag for Aksaray (TR-68) 🏴󠁴󠁴󠁤󠁭󠁮󠁿 Flag for Diego Martin (TT-DMN) 🏴󠁴󠁲󠀶󠀹󠁿 Flag for Bayburt (TR-69) 🏴󠁴󠁲󠀵󠀹󠁿 Flag for Tekirdağ (TR-59) 🏴󠁴󠁲󠀷󠀲󠁿 Flag for Batman (TR-72) 🏴󠁴󠁴󠁣󠁨󠁡󠁿 Flag for Chaguanas (TT-CHA) 🏴󠁴󠁲󠀸󠀰󠁿 Flag for Osmaniye (TR-80) 🏴󠁴󠁲󠀷󠀷󠁿 Flag for Yalova (TR-77) 🏴󠁴󠁴󠁳󠁪󠁬󠁿 Flag for San Juan-Laventille (TT-SJL) 🏴󠁴󠁲󠀷󠀸󠁿 Flag for Karabük (TR-78) 🏴󠁴󠁲󠀶󠀶󠁿 Flag for Yozgat (TR-66) 🏴󠁴󠁴󠁭󠁲󠁣󠁿 Flag for Mayaro-Rio Claro (TT-MRC) 🏴󠁴󠁲󠀶󠀴󠁿 Flag for Uşak (TR-64) 🏴󠁴󠁲󠀵󠀷󠁿 Flag for Sinop (TR-57) 🏴󠁴󠁴󠁴󠁵󠁰󠁿 Flag for Tunapuna-Piarco (TT-TUP) 🏴󠁴󠁲󠀷󠀴󠁿 Flag for Bartın (TR-74) 🏴󠁴󠁲󠀷󠀱󠁿 Flag for Kırıkkale (TR-71) 🏴󠁴󠁴󠁰󠁥󠁤󠁿 Flag for Penal-Debe (TT-PED) 🏴󠁴󠁲󠀷󠀶󠁿 Flag for Iğdır (TR-76) 🏴󠁴󠁲󠀷󠀳󠁿 Flag for Şırnak (TR-73) 🏴󠁴󠁲󠀶󠀱󠁿 Flag for Trabzon (TR-61) 🏴󠁴󠁴󠁰󠁴󠁦󠁿 Flag for Point Fortin (TT-PTF) 🏴󠁴󠁲󠀶󠀲󠁿 Flag 
for Tunceli (TR-62) 🏴󠁴󠁲󠀶󠀰󠁿 Flag for Tokat (TR-60) 🏴󠁴󠁲󠀷󠀰󠁿 Flag for Karaman (TR-70) 🏴󠁴󠁴󠁳󠁦󠁯󠁿 Flag for San Fernando (TT-SFO) 🏴󠁴󠁲󠀵󠀸󠁿 Flag for Sivas (TR-58) 🏴󠁴󠁺󠀰󠀷󠁿 Flag for Zanzibar North (TZ-07) 🏴󠁴󠁷󠁣󠁨󠁡󠁿 Flag for Changhua (TW-CHA) 🏴󠁴󠁶󠁶󠁡󠁩󠁿 Flag for Vaitupu (TV-VAI) 🏴󠁴󠁷󠁫󠁨󠁨󠁿 Flag for Kaohsiung (TW-KHH) 🏴󠁴󠁺󠀰󠀹󠁿 Flag for Kilimanjaro (TZ-09) 🏴󠁴󠁷󠁫󠁩󠁮󠁿 Flag for Kinmen (TW-KIN) 🏴󠁴󠁷󠁰󠁥󠁮󠁿 Flag for Penghu (TW-PEN) 🏴󠁴󠁷󠁴󠁮󠁮󠁿 Flag for Tainan (TW-TNN) 🏴󠁴󠁶󠁮󠁫󠁦󠁿 Flag for Nukufetau (TV-NKF) 🏴󠁴󠁺󠀰󠀸󠁿 Flag for Kigoma (TZ-08) 🏴󠁴󠁷󠁴󠁰󠁥󠁿 Flag for Taipei (TW-TPE) 🏴󠁴󠁷󠁰󠁩󠁦󠁿 Flag for Pingtung (TW-PIF) 🏴󠁴󠁷󠁩󠁬󠁡󠁿 Flag for Yilan (TW-ILA) 🏴󠁴󠁷󠁴󠁡󠁯󠁿 Flag for Taoyuan (TW-TAO) 🏴󠁴󠁺󠀰󠀳󠁿 Flag for Dodoma (TZ-03) 🏴󠁴󠁶󠁮󠁵󠁩󠁿 Flag for Nui (TV-NUI) 🏴󠁴󠁶󠁮󠁩󠁴󠁿 Flag for Niutao (TV-NIT) 🏴󠁴󠁺󠀰󠀶󠁿 Flag for North Pemba (TZ-06) 🏴󠁴󠁷󠁮󠁷󠁴󠁿 Flag for New Taipei (TW-NWT) 🏴󠁴󠁺󠀰󠀴󠁿 Flag for Iringa (TZ-04) 🏴󠁴󠁺󠀰󠀵󠁿 Flag for Kagera (TZ-05) 🏴󠁴󠁷󠁹󠁵󠁮󠁿 Flag for Yunlin (TW-YUN) 🏴󠁴󠁷󠁬󠁩󠁥󠁿 Flag for Lienchiang (TW-LIE) 🏴󠁴󠁶󠁮󠁭󠁧󠁿 Flag for Nanumanga (TV-NMG) 🏴󠁴󠁺󠀰󠀲󠁿 Flag for Dar es Salaam (TZ-02) 🏴󠁴󠁶󠁮󠁭󠁡󠁿 Flag for Nanumea (TV-NMA) 🏴󠁴󠁷󠁴󠁴󠁴󠁿 Flag for Taitung (TW-TTT) 🏴󠁴󠁷󠁮󠁡󠁮󠁿 Flag for Nantou (TW-NAN) 🏴󠁴󠁷󠁣󠁹󠁱󠁿 Flag for Chiayi (TW-CYQ) 🏴󠁴󠁺󠀰󠀱󠁿 Flag for Arusha (TZ-01) 🏴󠁴󠁷󠁨󠁵󠁡󠁿 Flag for Hualien (TW-HUA) 🏴󠁴󠁷󠁣󠁹󠁩󠁿 Flag for Chiayi County (TW-CYI) 🏴󠁴󠁷󠁴󠁸󠁧󠁿 Flag for Taichung (TW-TXG) 🏴󠁴󠁷󠁫󠁥󠁥󠁿 Flag for Keelung (TW-KEE) 🏴󠁴󠁷󠁭󠁩󠁡󠁿 Flag for Miaoli (TW-MIA) 🏴󠁵󠁡󠀴󠀳󠁿 Flag for Crimea (UA-43) 🏴󠁴󠁺󠀱󠀲󠁿 Flag for Lindi (TZ-12) 🏴󠁴󠁺󠀲󠀶󠁿 Flag for Manyara (TZ-26) 🏴󠁵󠁡󠀰󠀹󠁿 Flag for Luhanshchyna (UA-09) 🏴󠁴󠁺󠀲󠀰󠁿 Flag for Rukwa (TZ-20) 🏴󠁵󠁡󠀱󠀲󠁿 Flag for Dnipropetrovshchyna (UA-12) 🏴󠁵󠁡󠀰󠀷󠁿 Flag for Volyn (UA-07) 🏴󠁴󠁺󠀲󠀲󠁿 Flag for Shinyanga (TZ-22) 🏴󠁵󠁡󠀰󠀵󠁿 Flag for Vinnychchyna (UA-05) 🏴󠁴󠁺󠀲󠀱󠁿 Flag for Ruvuma (TZ-21) 🏴󠁴󠁺󠀲󠀸󠁿 Flag for Katavi (TZ-28) 🏴󠁵󠁡󠀲󠀳󠁿 Flag for Zaporizhzhya (UA-23) 🏴󠁵󠁡󠀳󠀲󠁿 Flag for Kyivshchyna (UA-32) 🏴󠁴󠁺󠀲󠀳󠁿 Flag for Singida (TZ-23) 🏴󠁴󠁺󠀲󠀴󠁿 Flag for Tabora (TZ-24) 🏴󠁴󠁺󠀱󠀳󠁿 Flag for Mara (TZ-13) 🏴󠁴󠁺󠀲󠀷󠁿 Flag for Geita (TZ-27) 🏴󠁴󠁺󠀳󠀰󠁿 Flag for Simiyu (TZ-30) 🏴󠁵󠁡󠀴󠀸󠁿 Flag for Mykolayivschyna (UA-48) 🏴󠁵󠁡󠀳󠀵󠁿 Flag for 
Kirovohradschyna (UA-35) 🏴󠁵󠁡󠀵󠀶󠁿 Flag for Rivnenshchyna (UA-56) 🏴󠁵󠁡󠀵󠀳󠁿 Flag for Poltavshchyna (UA-53) 🏴󠁴󠁺󠀱󠀴󠁿 Flag for Mbeya (TZ-14) 🏴󠁴󠁺󠀱󠀸󠁿 Flag for Mwanza (TZ-18) 🏴󠁵󠁡󠀲󠀱󠁿 Flag for Zakarpattia (UA-21) 🏴󠁴󠁺󠀱󠀰󠁿 Flag for South Pemba (TZ-10) 🏴󠁴󠁺󠀱󠀹󠁿 Flag for Pwani (TZ-19) 🏴󠁴󠁺󠀱󠀷󠁿 Flag for Mtwara (TZ-17) 🏴󠁵󠁡󠀴󠀰󠁿 Flag for Sevastopol (UA-40) 🏴󠁵󠁡󠀵󠀱󠁿 Flag for Odeshchyna (UA-51) 🏴󠁵󠁡󠀴󠀶󠁿 Flag for Lvivshchyna (UA-46) 🏴󠁵󠁡󠀱󠀴󠁿 Flag for Donechchyna (UA-14) 🏴󠁵󠁡󠀲󠀶󠁿 Flag for Prykarpattia (UA-26) 🏴󠁴󠁺󠀱󠀵󠁿 Flag for Zanzibar Urban/West (TZ-15) 🏴󠁴󠁺󠀱󠀶󠁿 Flag for Morogoro (TZ-16) 🏴󠁴󠁺󠀲󠀹󠁿 Flag for Njombe (TZ-29) 🏴󠁵󠁡󠀷󠀷󠁿 Flag for Chernivtsi Oblast (UA-77) 🏴󠁵󠁭󠀹󠀵󠁿 Flag for Palmyra Atoll (UM-95) 🏴󠁵󠁳󠁫󠁳󠁿 Flag for Kansas (US-KS) 👨🏽‍👨🏽‍👦🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁵󠁳󠁡󠁺󠁿 Flag for Arizona (US-AZ) 🏴󠁵󠁭󠀶󠀷󠁿 Flag for Johnston Atoll (UM-67) 🏴󠁵󠁡󠀷󠀴󠁿 Flag for Chernihivshchyna (UA-74) 🏴󠁵󠁭󠀸󠀴󠁿 Flag for Howland Island (UM-84) 🏴󠁵󠁳󠁧󠁡󠁿 Flag for Georgia (US-GA) 🏴󠁵󠁳󠁨󠁩󠁿 Flag for Hawaii (US-HI) 🏴󠁵󠁭󠀷󠀱󠁿 Flag for Midway Atoll (UM-71) 🏴󠁵󠁳󠁡󠁳󠁿 Flag for American Samoa (US-AS) 🏴󠁵󠁳󠁣󠁴󠁿 Flag for Connecticut (US-CT) 🏴󠁵󠁳󠁩󠁡󠁿 Flag for Iowa (US-IA) 🏴󠁵󠁡󠀶󠀱󠁿 Flag for Ternopilshchyna (UA-61) 🏴󠁵󠁧󠁮󠁿 Flag for Northern (UG-N) 🏴󠁵󠁳󠁧󠁵󠁿 Flag for Guam (US-GU) 🏴󠁵󠁭󠀸󠀱󠁿 Flag for Baker Island (UM-81) 🏴󠁵󠁧󠁥󠁿 Flag for Eastern (UG-E) 🏴󠁵󠁡󠀶󠀵󠁿 Flag for Khersonshchyna (UA-65) 🏴󠁵󠁡󠀵󠀹󠁿 Flag for Sumshchyna (UA-59) 🏴󠁵󠁳󠁩󠁮󠁿 Flag for Indiana (US-IN) 🏴󠁵󠁳󠁡󠁲󠁿 Flag for Arkansas (US-AR) 🏴󠁵󠁳󠁤󠁥󠁿 Flag for Delaware (US-DE) 🏴󠁵󠁡󠀶󠀳󠁿 Flag for Kharkivshchyna (UA-63) 🏴󠁵󠁳󠁡󠁬󠁿 Flag for Alabama (US-AL) 🏴󠁵󠁧󠁷󠁿 Flag for Western (UG-W) 🏴󠁵󠁡󠀶󠀸󠁿 Flag for Khmelnychchyna (UA-68) 🏴󠁵󠁭󠀷󠀶󠁿 Flag for Navassa Island (UM-76) 🏴󠁵󠁭󠀸󠀶󠁿 Flag for Jarvis Island (UM-86) 🏴󠁵󠁳󠁩󠁤󠁿 Flag for Idaho (US-ID) 🏴󠁵󠁭󠀸󠀹󠁿 Flag for Kingman Reef (UM-89) 🏴󠁵󠁳󠁦󠁬󠁿 Flag for Florida (US-FL) 🏴󠁵󠁭󠀷󠀹󠁿 Flag for Wake Island (UM-79) 🏴󠁵󠁳󠁩󠁬󠁿 Flag for Illinois (US-IL) 🏴󠁵󠁳󠁤󠁣󠁿 Flag for Washington DC (US-DC) 🏴󠁵󠁡󠀷󠀱󠁿 Flag for Cherkashchyna (UA-71) 🏴󠁵󠁳󠁮󠁹󠁿 Flag for New York (US-NY) 
👨🏾‍👨🏾‍👦🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁵󠁳󠁮󠁣󠁿 Flag for North Carolina (US-NC) 🏴󠁵󠁳󠁭󠁳󠁿 Flag for Mississippi (US-MS) 🏴󠁵󠁳󠁭󠁡󠁿 Flag for Massachusetts (US-MA) 🏴󠁵󠁳󠁮󠁶󠁿 Flag for Nevada (US-NV) 🏴󠁵󠁳󠁷󠁩󠁿 Flag for Wisconsin (US-WI) 🏴󠁵󠁳󠁭󠁤󠁿 Flag for Maryland (US-MD) 🏴󠁵󠁳󠁮󠁭󠁿 Flag for New Mexico (US-NM) 🏴󠁵󠁳󠁰󠁲󠁿 Flag for Puerto Rico (US-PR) 🏴󠁵󠁳󠁵󠁭󠁿 Flag for U.S. Outlying Islands (US-UM) 🏴󠁵󠁳󠁷󠁹󠁿 Flag for Wyoming (US-WY) 🏴󠁵󠁳󠁯󠁨󠁿 Flag for Ohio (US-OH) 🏴󠁵󠁳󠁫󠁹󠁿 Flag for Kentucky (US-KY) 🏴󠁵󠁳󠁮󠁪󠁿 Flag for New Jersey (US-NJ) 🏴󠁵󠁳󠁯󠁲󠁿 Flag for Oregon (US-OR) 🏴󠁵󠁳󠁭󠁩󠁿 Flag for Michigan (US-MI) 🏴󠁵󠁳󠁶󠁩󠁿 Flag for U.S. Virgin Islands (US-VI) 🏴󠁵󠁳󠁭󠁯󠁿 Flag for Missouri (US-MO) 🏴󠁵󠁳󠁰󠁡󠁿 Flag for Pennsylvania (US-PA) 🏴󠁵󠁳󠁶󠁡󠁿 Flag for Virginia (US-VA) 🏴󠁵󠁹󠁡󠁲󠁿 Flag for Artigas (UY-AR) 🏴󠁵󠁹󠁣󠁡󠁿 Flag for Canelones (UY-CA) 🏴󠁵󠁳󠁷󠁡󠁿 Flag for Washington (US-WA) 🏴󠁵󠁳󠁳󠁣󠁿 Flag for South Carolina (US-SC) 🏴󠁵󠁳󠁭󠁥󠁿 Flag for Maine (US-ME) 🏴󠁵󠁳󠁬󠁡󠁿 Flag for Louisiana (US-LA) 🏴󠁵󠁳󠁭󠁮󠁿 Flag for Minnesota (US-MN) 🏴󠁵󠁳󠁲󠁩󠁿 Flag for Rhode Island (US-RI) 🏴󠁵󠁳󠁷󠁶󠁿 Flag for West Virginia (US-WV) 🏴󠁵󠁳󠁴󠁸󠁿 Flag for Texas (US-TX) 🏴󠁵󠁳󠁵󠁴󠁿 Flag for Utah (US-UT) 🏴󠁵󠁳󠁯󠁫󠁿 Flag for Oklahoma (US-OK) 🏴󠁵󠁳󠁮󠁨󠁿 Flag for New Hampshire (US-NH) 🏴󠁵󠁺󠁳󠁡󠁿 Flag for Samarqand (UZ-SA) 🏴󠁵󠁹󠁭󠁡󠁿 Flag for Maldonado (UY-MA) 🏴󠁵󠁺󠁮󠁧󠁿 Flag for Namangan (UZ-NG) 🏴󠁶󠁣󠀰󠀱󠁿 Flag for Charlotte (VC-01) 🏴󠁵󠁹󠁳󠁡󠁿 Flag for Salto (UY-SA) 🏴󠁵󠁹󠁣󠁬󠁿 Flag for Cerro Largo (UY-CL) 🏴󠁵󠁹󠁴󠁡󠁿 Flag for Tacuarembó (UY-TA) 🏴󠁶󠁥󠁡󠁿 Flag for Capital (VE-A) 🏴󠁶󠁥󠁢󠁿 Flag for Anzoátegui (VE-B) 🏴󠁶󠁣󠀰󠀲󠁿 Flag for Saint Andrew (VC-02) 🏴󠁵󠁹󠁳󠁯󠁿 Flag for Soriano (UY-SO) 🏴󠁵󠁹󠁲󠁯󠁿 Flag for Rocha (UY-RO) 🏴󠁶󠁣󠀰󠀳󠁿 Flag for Saint David (VC-03) 🏴󠁵󠁹󠁳󠁪󠁿 Flag for San José (UY-SJ) 🏴󠁵󠁹󠁦󠁤󠁿 Flag for Florida (UY-FD) 🏴󠁵󠁹󠁣󠁯󠁿 Flag for Colonia (UY-CO) 🏴󠁵󠁹󠁦󠁳󠁿 Flag for Flores (UY-FS) 🏴󠁵󠁺󠁸󠁯󠁿 Flag for Xorazm (UZ-XO) 🏴󠁵󠁹󠁤󠁵󠁿 Flag for Durazno (UY-DU) 🏴󠁵󠁺󠁡󠁮󠁿 Flag for Andijan (UZ-AN) 🏴󠁶󠁥󠁤󠁿 Flag for Aragua (VE-D) 🏴󠁵󠁺󠁳󠁩󠁿 Flag for Sirdaryo (UZ-SI) 🏴󠁵󠁹󠁰󠁡󠁿 Flag for Paysandú (UY-PA) 
🏴󠁶󠁣󠀰󠀶󠁿 Flag for Grenadines (VC-06) 🏴󠁵󠁹󠁲󠁶󠁿 Flag for Rivera (UY-RV) 🏴󠁵󠁹󠁬󠁡󠁿 Flag for Lavalleja (UY-LA) 🏴󠁵󠁺󠁳󠁵󠁿 Flag for Surxondaryo (UZ-SU) 🏴󠁵󠁺󠁴󠁯󠁿 Flag for Tashkent Province (UZ-TO) 🏴󠁵󠁺󠁱󠁡󠁿 Flag for Qashqadaryo (UZ-QA) 🏴󠁵󠁹󠁴󠁴󠁿 Flag for Treinta y Tres (UY-TT) 🏴󠁵󠁹󠁭󠁯󠁿 Flag for Montevideo (UY-MO) 🏴󠁵󠁺󠁢󠁵󠁿 Flag for Bukhara (UZ-BU) 🏴󠁵󠁺󠁦󠁡󠁿 Flag for Fergana (UZ-FA) 🏴󠁵󠁺󠁱󠁲󠁿 Flag for Karakalpakstan (UZ-QR) 🏴󠁵󠁺󠁪󠁩󠁿 Flag for Jizzakh (UZ-JI) 🏴󠁵󠁹󠁲󠁮󠁿 Flag for Río Negro (UY-RN) 🏴󠁵󠁺󠁴󠁫󠁿 Flag for Tashkent (UZ-TK) 🏴󠁶󠁣󠀰󠀵󠁿 Flag for Saint Patrick (VC-05) 🏴󠁵󠁺󠁮󠁷󠁿 Flag for Navoiy (UZ-NW) 🏴󠁶󠁥󠁫󠁿 Flag for Lara (VE-K) 🏴󠁶󠁥󠁯󠁿 Flag for Nueva Esparta (VE-O) 🏴󠁶󠁥󠁳󠁿 Flag for Táchira (VE-S) 🏴󠁶󠁥󠁦󠁿 Flag for Bolívar (VE-F) 🏴󠁶󠁮󠀲󠀱󠁿 Flag for Thanh Hóa (VN-21) 🏴󠁶󠁮󠀱󠀴󠁿 Flag for Hòa Bình (VN-14) 🏴󠁶󠁥󠁪󠁿 Flag for Guárico (VE-J) 🏴󠁶󠁥󠁨󠁿 Flag for Cojedes (VE-H) 🏴󠁶󠁮󠀲󠀶󠁿 Flag for Thừa Thiên–Huế (VN-26) 🏴󠁶󠁥󠁰󠁿 Flag for Portuguesa (VE-P) 🏴󠁶󠁮󠀱󠀸󠁿 Flag for Ninh Bình (VN-18) 🏴󠁶󠁥󠁲󠁿 Flag for Sucre (VE-R) 🏴󠁶󠁮󠀰󠀱󠁿 Flag for Lai Châu (VN-01) 🏴󠁶󠁮󠀰󠀹󠁿 Flag for Lạng Sơn (VN-09) 🏴󠁶󠁥󠁭󠁿 Flag for Miranda (VE-M) 🏴󠁶󠁮󠀲󠀴󠁿 Flag for Quảng Bình (VN-24) 🏴󠁶󠁥󠁥󠁿 Flag for Barinas (VE-E) 🏴󠁶󠁥󠁮󠁿 Flag for Monagas (VE-N) 🏴󠁶󠁮󠀲󠀲󠁿 Flag for Nghệ An (VN-22) 🏴󠁶󠁮󠀰󠀲󠁿 Flag for Lào Cai (VN-02) 🏴󠁶󠁮󠀰󠀷󠁿 Flag for Tuyên Quang (VN-07) 🏴󠁶󠁮󠀰󠀵󠁿 Flag for Sơn La (VN-05) 🏴󠁶󠁮󠀲󠀰󠁿 Flag for Thái Bình (VN-20) 🏴󠁶󠁥󠁷󠁿 Flag for Federal Dependencies (VE-W) 🏴󠁶󠁮󠀲󠀹󠁿 Flag for Quảng Ngãi (VN-29) 🏴󠁶󠁥󠁬󠁿 Flag for Mérida (VE-L) 🏴󠁶󠁥󠁩󠁿 Flag for Falcón (VE-I) 🏴󠁶󠁮󠀰󠀴󠁿 Flag for Cao Bằng (VN-04) 🏴󠁶󠁥󠁺󠁿 Flag for Amazonas (VE-Z) 🏴󠁶󠁮󠀰󠀶󠁿 Flag for Yên Bái (VN-06) 🏴󠁶󠁮󠀲󠀳󠁿 Flag for Hà Tĩnh (VN-23) 🏴󠁶󠁮󠀲󠀸󠁿 Flag for Kon Tum (VN-28) 🏴󠁶󠁥󠁸󠁿 Flag for Vargas (VE-X) 🏴󠁶󠁥󠁵󠁿 Flag for Yaracuy (VE-U) 🏴󠁶󠁥󠁴󠁿 Flag for Trujillo (VE-T) 🏴󠁶󠁮󠀱󠀳󠁿 Flag for Quảng Ninh (VN-13) 🏴󠁶󠁮󠀰󠀳󠁿 Flag for Hà Giang (VN-03) 🏴󠁶󠁮󠀲󠀷󠁿 Flag for Quảng Nam (VN-27) 🏴󠁶󠁮󠀵󠀶󠁿 Flag for Bắc Ninh (VN-56) 🏴󠁶󠁮󠀳󠀶󠁿 Flag for Ninh Thuận (VN-36) 🏴󠁶󠁮󠀶󠀹󠁿 Flag for Thái Nguyên (VN-69) 🏴󠁶󠁮󠀶󠀷󠁿 Flag for Nam Định (VN-67) 🏴󠁶󠁮󠀳󠀵󠁿 Flag for Lâm Đồng (VN-35) 🏴󠁶󠁮󠀶󠀱󠁿 Flag for Hải Dương (VN-61) 🏴󠁶󠁮󠀵󠀲󠁿 
Flag for Sóc Trăng (VN-52) 🏴󠁶󠁮󠀷󠀳󠁿 Flag for Hậu Giang (VN-73) 🏴󠁶󠁮󠀷󠀰󠁿 Flag for Vĩnh Phúc (VN-70) 🏴󠁶󠁮󠀵󠀰󠁿 Flag for Bến Tre (VN-50) 🏴󠁶󠁮󠀵󠀳󠁿 Flag for Bắc Kạn (VN-53) 🏴󠁶󠁮󠀵󠀴󠁿 Flag for Bắc Giang (VN-54) 🏴󠁶󠁮󠀳󠀳󠁿 Flag for Đắk Lắk (VN-33) 🏴󠁶󠁮󠀵󠀷󠁿 Flag for Bình Dương (VN-57) 🏴󠁶󠁮󠁤󠁮󠁿 Flag for Da Nang (VN-DN) 🏴󠁶󠁮󠀴󠀶󠁿 Flag for Tiền Giang (VN-46) 🏴󠁶󠁮󠀴󠀳󠁿 Flag for Bà Rịa–Vũng Tàu (VN-43) 🏴󠁶󠁮󠀷󠀱󠁿 Flag for Điện Biên (VN-71) 🏴󠁶󠁮󠀵󠀸󠁿 Flag for Bình Phước (VN-58) 🏴󠁶󠁮󠁣󠁴󠁿 Flag for Can Tho (VN-CT) 🏴󠁶󠁮󠀵󠀵󠁿 Flag for Bạc Liêu (VN-55) 🏴󠁶󠁮󠀳󠀲󠁿 Flag for Phú Yên (VN-32) 🏴󠁶󠁮󠀴󠀴󠁿 Flag for An Giang (VN-44) 🏴󠁶󠁮󠀶󠀳󠁿 Flag for Hà Nam (VN-63) 🏴󠁶󠁮󠀵󠀹󠁿 Flag for Cà Mau (VN-59) 🏴󠁶󠁮󠀴󠀷󠁿 Flag for Kiên Giang (VN-47) 🏴󠁶󠁮󠀳󠀴󠁿 Flag for Khánh Hòa (VN-34) 🏴󠁶󠁮󠀴󠀵󠁿 Flag for Đồng Tháp (VN-45) 🏴󠁶󠁮󠀳󠀹󠁿 Flag for Đồng Nai (VN-39) 🏴󠁶󠁮󠁨󠁮󠁿 Flag for Hanoi (VN-HN) 🏴󠁶󠁮󠀴󠀹󠁿 Flag for Vĩnh Long (VN-49) 🏴󠁶󠁮󠀶󠀸󠁿 Flag for Phú Thọ (VN-68) 🏴󠁶󠁮󠀳󠀷󠁿 Flag for Tây Ninh (VN-37) 🏴󠁶󠁮󠀳󠀰󠁿 Flag for Gia Lai (VN-30) 🏴󠁶󠁮󠀷󠀲󠁿 Flag for Đắk Nông (VN-72) 🏴󠁶󠁮󠀴󠀰󠁿 Flag for Bình Thuận (VN-40) 🏴󠁶󠁮󠀴󠀱󠁿 Flag for Long An (VN-41) 🏴󠁶󠁮󠀳󠀱󠁿 Flag for Bình Định (VN-31) 🏴󠁷󠁦󠁵󠁶󠁿 Flag for Uvea (WF-UV) 🏴󠁹󠁥󠁳󠁤󠁿 Flag for Sa’dah (YE-SD) 🏴󠁹󠁥󠁡󠁢󠁿 Flag for Abyan (YE-AB) 🏴󠁹󠁥󠁨󠁪󠁿 Flag for Hajjah (YE-HJ) 🏴󠁶󠁵󠁭󠁡󠁰󠁿 Flag for Malampa (VU-MAP) 🏴󠁷󠁳󠁡󠁴󠁿 Flag for Atua (WS-AT) 🏴󠁷󠁳󠁶󠁦󠁿 Flag for Va’a-o-Fonoti (WS-VF) 🏴󠁹󠁥󠁨󠁵󠁿 Flag for Al Hudaydah (YE-HU) 🏴󠁷󠁳󠁰󠁡󠁿 Flag for Palauli (WS-PA) 🏴󠁷󠁳󠁳󠁡󠁿 Flag for Satupa’itea (WS-SA) 🏴󠁹󠁥󠁤󠁡󠁿 Flag for Dhale (YE-DA) 🏴󠁭󠁬󠀶󠁿 Flag for Tombouctou (ML-6) 🏴󠁹󠁥󠁲󠁡󠁿 Flag for Raymah (YE-RA) 🏴󠁶󠁵󠁳󠁡󠁭󠁿 Flag for Sanma (VU-SAM) 🏴󠁷󠁦󠁡󠁬󠁿 Flag for Alo (WF-AL) 🏴󠁹󠁥󠁭󠁲󠁿 Flag for Al Mahrah (YE-MR) 👨🏻‍👨🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone 🏴󠁹󠁥󠁡󠁤󠁿 Flag for ’Adan (YE-AD) 🏴󠁹󠁥󠁳󠁨󠁿 Flag for Shabwah (YE-SH) 🏴󠁶󠁵󠁴󠁡󠁥󠁿 Flag for Tafea (VU-TAE) 🏴󠁹󠁥󠁡󠁭󠁿 Flag for Amran (YE-AM) 🏴󠁶󠁵󠁰󠁡󠁭󠁿 Flag for Penama (VU-PAM) 🏴󠁹󠁥󠁭󠁷󠁿 Flag for Al Mahwit (YE-MW) 🏴󠁷󠁳󠁧󠁥󠁿 Flag for Gaga’emauga (WS-GE) 🏴󠁹󠁥󠁨󠁤󠁿 Flag for Hadramaut (YE-HD) 🏴󠁷󠁳󠁡󠁬󠁿 Flag for Aiga-i-le-Tai (WS-AL) 🏴󠁹󠁥󠁭󠁡󠁿 Flag for Ma’rib (YE-MA) 🏴󠁹󠁥󠁢󠁡󠁿 Flag 
for Al Bayda (YE-BA) 🏴󠁶󠁮󠁨󠁰󠁿 Flag for Haiphong (VN-HP) 🏴󠁷󠁳󠁡󠁡󠁿 Flag for A’ana (WS-AA) 🏴󠁷󠁦󠁳󠁧󠁿 Flag for Sigave (WF-SG) 🏴󠁹󠁥󠁬󠁡󠁿 Flag for Lahij (YE-LA) 🏴󠁶󠁵󠁳󠁥󠁥󠁿 Flag for Shefa (VU-SEE) 🏴󠁹󠁥󠁩󠁢󠁿 Flag for Ibb (YE-IB) 🏴󠁶󠁵󠁴󠁯󠁢󠁿 Flag for Torba (VU-TOB) 🏴󠁹󠁥󠁪󠁡󠁿 Flag for Al Jawf (YE-JA) 🏴󠁷󠁳󠁴󠁵󠁿 Flag for Tuamasaga (WS-TU) 🏴󠁹󠁥󠁤󠁨󠁿 Flag for Dhamar (YE-DH) 🏴󠁺󠁡󠁷󠁣󠁿 Flag for Western Cape (ZA-WC) 🏴󠁹󠁥󠁳󠁵󠁿 Flag for Arkhabil Suqutra (YE-SU) 🏴󠁺󠁷󠁭󠁮󠁿 Flag for Matabeleland North (ZW-MN) 🏴󠁺󠁷󠁭󠁥󠁿 Flag for Mashonaland East (ZW-ME) 🏴󠁺󠁭󠀰󠀶󠁿 Flag for North-Western (ZM-06) 🏴󠁹󠁥󠁳󠁮󠁿 Flag for Sana’a (YE-SN) 🏴󠁺󠁡󠁬󠁰󠁿 Flag for Limpopo (ZA-LP) 🏴󠁺󠁭󠀰󠀳󠁿 Flag for Eastern (ZM-03) 🏴󠁺󠁷󠁭󠁩󠁿 Flag for Midlands (ZW-MI) 🏴󠁺󠁷󠁢󠁵󠁿 Flag for Bulawayo (ZW-BU) 🏴󠁺󠁭󠀰󠀵󠁿 Flag for Northern (ZM-05) 🏴󠁺󠁭󠀰󠀷󠁿 Flag for Southern (ZM-07) 🏴󠁺󠁡󠁦󠁳󠁿 Flag for Free (ZA-FS) 🏴󠁺󠁷󠁭󠁳󠁿 Flag for Matabeleland South (ZW-MS) 🏴󠁺󠁡󠁥󠁣󠁿 Flag for Eastern Cape (ZA-EC) 🏴󠁺󠁭󠀰󠀱󠁿 Flag for Western (ZM-01) 👨🏼‍👨🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁺󠁭󠀰󠀸󠁿 Flag for Copperbelt (ZM-08) 🏴󠁺󠁡󠁮󠁷󠁿 Flag for North West (ZA-NW) 🏴󠁺󠁭󠀱󠀰󠁿 Flag for Muchinga (ZM-10) 🏴󠁺󠁡󠁧󠁴󠁿 Flag for Gauteng (ZA-GT) 🏴󠁺󠁭󠀰󠀹󠁿 Flag for Lusaka (ZM-09) 🏴󠁺󠁭󠀰󠀲󠁿 Flag for Central (ZM-02) 🏴󠁺󠁡󠁮󠁣󠁿 Flag for Northern Cape (ZA-NC) 🏴󠁺󠁡󠁭󠁰󠁿 Flag for Mpumalanga (ZA-MP) 🏴󠁹󠁥󠁴󠁡󠁿 Flag for Taiz (YE-TA) 🏴󠁺󠁡󠁮󠁬󠁿 Flag for KwaZulu-Natal (ZA-NL) 🏴󠁺󠁷󠁭󠁡󠁿 Flag for Manicaland (ZW-MA) 🏴󠁺󠁷󠁭󠁶󠁿 Flag for Masvingo (ZW-MV) 🏴󠁺󠁭󠀰󠀴󠁿 Flag for Luapula (ZM-04) 🏴󠁺󠁷󠁭󠁷󠁿 Flag for Mashonaland West (ZW-MW) 🏴󠁺󠁷󠁨󠁡󠁿 Flag for Harare (ZW-HA) 👨🏽‍👨🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone 👨🏾‍👨🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁦󠁲󠁰󠁤󠁬󠁿 Flag for Pays-de-la-Loire (FR-PDL) 🏴󠁬󠁴󠀲󠀰󠁿 Flag for Klaipėdos Municipality (LT-20) 🏴󠁧󠁲󠁭󠁿 Flag for Crete (GR-M) 󠁸 Tag Latin Small Letter X 🏴󠁩󠁲󠀲󠀱󠁿 Flag for Mazandaran (IR-21) 🏴󠁲󠁵󠁰󠁲󠁩󠁿 Flag for Primorsky Krai (RU-PRI) 🏴󠁪󠁰󠀰󠀷󠁿 Flag for Fukushima (JP-07) 🏴󠁣󠁡󠁭󠁢󠁿 Flag for Manitoba (CA-MB) 
👨🏻‍👨🏻‍👦🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 👩🏻‍❤️‍👩🏻 Couple With Heart - Woman: Light Skin Tone, Woman: Light Skin Tone 🏴󠁣󠁡󠁱󠁣󠁿 Flag for Quebec (CA-QC) 👨‍👩‍👶 Family: Man, Woman, Baby 🏴󠁮󠁡󠁫󠁥󠁿 Flag for Kavango East (NA-KE) 🏴󠁭󠁸󠁳󠁬󠁰󠁿 Flag for San Luis Potosí (MX-SLP) 🏴󠁥󠁥󠀵󠀹󠁿 Flag for Lääne-Viru (EE-59) 🏴󠁬󠁲󠁢󠁧󠁿 Flag for Bong (LR-BG) 🏴󠁰󠁳󠁤󠁥󠁢󠁿 Flag for Deir al-Balah (PS-DEB) 👨🏿‍👨🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁪󠁭󠀰󠀳󠁿 Flag for Saint Thomas (JM-03) 🏴󠁰󠁷󠀱󠀰󠀰󠁿 Flag for Kayangel (PW-100) 🏴󠁣󠁧󠀱󠀲󠁿 Flag for Pool (CG-12) 👨‍❤️‍👨🏾 Couple With Heart - Man, Man: Medium-Dark Skin Tone 🏴󠁥󠁳󠁩󠁢󠁿 Flag for Balearic Islands (ES-IB) 👩‍👨‍👦 Family: Woman, Man, Boy 🏴󠁦󠁩󠀱󠀸󠁿 Flag for Uusimaa (FI-18) 👨🏻‍👩🏻‍👦🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁲󠁣󠁥󠁿 Flag for Ceará (BR-CE) 👨‍👩‍👦‍👶 Family: Man, Woman, Boy, Baby 👨🏻‍👨🏻‍👧🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁭󠁫󠀲󠀵󠁿 Flag for Demir Hisar (MK-25) 🏴󠁣󠁬󠁡󠁮󠁿 Flag for Antofagasta (CL-AN) 🏴󠁢󠁢󠀰󠀱󠁿 Flag for Christ Church (BB-01) 🏴󠁥󠁥󠀳󠀷󠁿 Flag for Harju (EE-37) 👨🏿‍❤️‍💋‍👩🏽 Kiss - Man: Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁮󠁲󠀱󠀴󠁿 Flag for Yaren (NR-14) 👩‍❤️‍👩🏻 Couple With Heart - Woman, Woman: Light Skin Tone 🏴󠁭󠁹󠀱󠀰󠁿 Flag for Selangor (MY-10) 👨🏼‍👨🏼‍👧🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁰󠁥󠁡󠁰󠁵󠁿 Flag for Apurímac (PE-APU) 👩‍👨‍👦‍👧 Family: Woman, Man, Boy, Girl 👨🏿‍👩🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁧󠁥󠁡󠁢󠁿 Flag for Abkhazia (GE-AB) 🏴󠁬󠁩󠀰󠀸󠁿 Flag for Schellenberg (LI-08) 🏴󠁴󠁲󠀸󠀱󠁿 Flag for Düzce (TR-81) 👩🏾‍👧🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩‍👨‍👶‍👦 Family: Woman, Man, Baby, Boy 🏴󠁭󠁸󠁳󠁯󠁮󠁿 Flag for Sonora (MX-SON) 🏴󠁣󠁩󠁳󠁭󠁿 Flag for Sassandra-Marahoué (CI-SM) 
🏴󠁰󠁥󠁡󠁲󠁥󠁿 Flag for Arequipa (PE-ARE) 👩🏽‍❤️‍👩🏼 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁣󠁧󠀱󠀱󠁿 Flag for Bouenza (CG-11) 🏴󠁪󠁭󠀱󠀴󠁿 Flag for Saint Catherine (JM-14) 🏴󠁳󠁩󠀱󠀲󠀲󠁿 Flag for Škofja Loka (SI-122) 👩🏻‍❤️‍💋‍👨🏼 Kiss - Woman: Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁴󠁷󠁨󠁳󠁺󠁿 Flag for Hsinchu (TW-HSZ) 👩🏼‍👧🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁬󠁫󠀳󠁿 Flag for Southern (LK-3) 👨‍❤️‍💋‍👨🏼 Kiss - Man, Man: Medium-Light Skin Tone 👨🏽‍👨🏽‍👧🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁮󠁩󠁬󠁥󠁿 Flag for León (NI-LE) 🏴󠁨󠁲󠀰󠀵󠁿 Flag for Varaždin (HR-05) 🏴󠁣󠁯󠁡󠁮󠁴󠁿 Flag for Antioquia (CO-ANT) 🏴󠁭󠁣󠁳󠁤󠁿 Flag for Sainte-Dévote Chapel (MC-SD) 🏴󠁭󠁫󠀶󠀱󠁿 Flag for Plasnica (MK-61) 👨🏾‍❤️‍👨🏻 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Light Skin Tone 🏴󠁧󠁲󠁧󠁿 Flag for West Greece (GR-G) 🏴󠁭󠁶󠁮󠁯󠁿 Flag for North Province (MV-NO) 👨‍❤️‍👩🏻 Couple With Heart - Man, Woman: Light Skin Tone 🏴󠁶󠁥󠁣󠁿 Flag for Apure (VE-C) ☿️ Mercury 🏴󠁵󠁳󠁭󠁴󠁿 Flag for Montana (US-MT) 👩🏼‍❤️‍👨🏾 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏾‍👨🏾‍👧🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁥󠁣󠁥󠁿 Flag for Esmeraldas (EC-E) 🏴󠁤󠁺󠀰󠀸󠁿 Flag for Béchar (DZ-08) 🏴󠁮󠁬󠁮󠁨󠁿 Flag for North Holland (NL-NH) 🏴󠁦󠁲󠁢󠁬󠁿 Flag for St. 
Barthélemy (FR-BL) 🏴󠁣󠁦󠁵󠁫󠁿 Flag for Ouaka (CF-UK) 🏴󠁳󠁤󠁲󠁳󠁿 Flag for Red Sea (SD-RS) 🏴󠁭󠁸󠁴󠁡󠁢󠁿 Flag for Tabasco (MX-TAB) 🏴󠁣󠁮󠀹󠀲󠁿 Flag for Macau SAR China (CN-92) 🏴󠁨󠁵󠁥󠁧󠁿 Flag for Eger (HU-EG) 🏴󠁲󠁵󠁳󠁥󠁿 Flag for North Ossetia-Alania (RU-SE) 🏴󠁣󠁤󠁥󠁱󠁿 Flag for Équateur (CD-EQ) 👨🏿‍👨🏿‍👧🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁥󠁳󠁰󠁶󠁿 Flag for Basque Country (ES-PV) 👨🏽‍❤️‍💋‍👨🏻 Kiss - Man: Medium Skin Tone, Man: Light Skin Tone 🏴󠁴󠁮󠀷󠀱󠁿 Flag for Gafsa (TN-71) 🏴󠁦󠁩󠀰󠀶󠁿 Flag for Tavastia Proper (FI-06) 🏴󠁩󠁲󠀳󠀰󠁿 Flag for Razavi Khorasan (IR-30) 🏴󠁳󠁩󠀱󠀵󠀴󠁿 Flag for Dobje (SI-154) 👨🏼‍❤️‍💋‍👨🏻 Kiss - Man: Medium-Light Skin Tone, Man: Light Skin Tone 🏴󠁧󠁴󠁲󠁥󠁿 Flag for Retalhuleu (GT-RE) 🏴󠁫󠁩󠁬󠁿 Flag for Line Islands (KI-L) 🏴󠁩󠁲󠀰󠀲󠁿 Flag for West Azarbaijan (IR-02) 🏴󠁣󠁯󠁮󠁡󠁲󠁿 Flag for Nariño (CO-NAR) 🏴󠁺󠁷󠁭󠁣󠁿 Flag for Mashonaland Central (ZW-MC) 👨🏻‍❤️‍👨🏻 Couple With Heart - Man: Light Skin Tone, Man: Light Skin Tone 🏴󠁩󠁴󠀴󠀵󠁿 Flag for Emilia-Romagna (IT-45) 🏴󠁥󠁳󠁶󠁣󠁿 Flag for Valencian Community (ES-VC) 🏴󠁴󠁨󠀷󠀵󠁿 Flag for Samut Songkhram (TH-75) 🏴󠁦󠁲󠁩󠁤󠁦󠁿 Flag for Île-de-France (FR-IDF) 🏴󠁬󠁳󠁡󠁿 Flag for Maseru (LS-A) 🏴󠁫󠁥󠀲󠀵󠁿 Flag for Marsabit (KE-25) 🏴󠁤󠁺󠀰󠀱󠁿 Flag for Adrar (DZ-01) 🏴󠁳󠁶󠁵󠁳󠁿 Flag for Usulután (SV-US) 🏴󠁬󠁶󠀰󠀶󠀰󠁿 Flag for Mazsalaca (LV-060) 👩🏻‍❤️‍💋‍👩🏾 Kiss - Woman: Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏾‍👦🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁴󠁨󠀳󠀶󠁿 Flag for Chaiyaphum (TH-36) 🏴󠁰󠁨󠀰󠀷󠁿 Flag for Central Visayas (PH-07) 🏴󠁴󠁨󠀸󠀶󠁿 Flag for Chumphon (TH-86) 🏴󠁣󠁩󠁺󠁺󠁿 Flag for Zanzan (CI-ZZ) 🏴󠁥󠁳󠁣󠁬󠁿 Flag for Castile and León (ES-CL) 👨🏻‍👨🏻‍👧🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 🏴󠁳󠁡󠀱󠀱󠁿 Flag for Al Bahah (SA-11) 🏴󠁢󠁱󠁳󠁥󠁿 Flag for Sint Eustatius (BQ-SE) 🏴󠁦󠁩󠀰󠀱󠁿 Flag for Åland Islands (FI-01) 🏴󠁣󠁲󠁨󠁿 Flag for Heredia (CR-H) 🏴󠁴󠁲󠀴󠀳󠁿 Flag for Kütahya (TR-43) 🏴󠁷󠁳󠁶󠁳󠁿 Flag for Vaisigano (WS-VS) 👨🏿‍❤️‍💋‍👩🏼 Kiss - Man: Dark Skin Tone, Woman: Medium-Light Skin Tone 
🏴󠁳󠁩󠀰󠀵󠀲󠁿 Flag for Kranj (SI-052) 🏴󠁶󠁥󠁶󠁿 Flag for Zulia (VE-V) 👩🏽‍❤️‍💋‍👨🏼 Kiss - Woman: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁬󠁵󠁣󠁡󠁿 Flag for Capellen (LU-CA) 👩🏽‍❤️‍👩🏾 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium-Dark Skin Tone 👨🏼‍👨🏼‍👧🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁧󠁹󠁥󠁢󠁿 Flag for East Berbice-Corentyne (GY-EB) 🏴󠁴󠁨󠀱󠀶󠁿 Flag for Lopburi (TH-16) 🏴󠁭󠁴󠀲󠀵󠁿 Flag for Luqa (MT-25) 👨🏻‍❤️‍👨🏼 Couple With Heart - Man: Light Skin Tone, Man: Medium-Light Skin Tone 👨🏽‍👨🏽‍👧🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩🏻‍❤️‍👩🏽 Couple With Heart - Woman: Light Skin Tone, Woman: Medium Skin Tone 🏴󠁭󠁸󠁢󠁣󠁳󠁿 Flag for Baja California Sur (MX-BCS) 🏴󠁥󠁧󠁢󠁮󠁳󠁿 Flag for Beni Suef (EG-BNS) 🏴󠁴󠁨󠀹󠀳󠁿 Flag for Phatthalung (TH-93) 🏴󠁴󠁺󠀲󠀵󠁿 Flag for Tanga (TZ-25) 🏴󠁭󠁡󠀰󠀴󠁿 Flag for Oriental (MA-04) 👨🏾‍👨🏾‍👧🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏿‍👩🏿‍👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁳󠁩󠀰󠀲󠀷󠁿 Flag for Gorenja Vas–Poljane (SI-027) 🏴󠁴󠁴󠁳󠁧󠁥󠁿 Flag for Sangre Grande (TT-SGE) 🏴󠁬󠁶󠀰󠀴󠀶󠁿 Flag for Koknese (LV-046) 🏴󠁳󠁩󠀰󠀸󠀶󠁿 Flag for Odranci (SI-086) 🏴󠁮󠁺󠁮󠁳󠁮󠁿 Flag for Nelson (NZ-NSN) 🏴󠁨󠁵󠁳󠁺󠁿 Flag for Szabolcs-Szatmár-Bereg (HU-SZ) 👩🏾‍❤️‍💋‍👨🏽 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium Skin Tone 🏴󠁳󠁩󠀲󠀱󠀰󠁿 Flag for Sveti Jurij v Slovenskih Goricah (SI-210) ߷ NKo Symbol Gbakurunen 🏴󠁮󠁧󠁤󠁥󠁿 Flag for Delta (NG-DE) 🏴󠁭󠁤󠁣󠁳󠁿 Flag for Căușeni (MD-CS) 👩🏽‍👧🏽‍👦🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁣󠁵󠀹󠀹󠁿 Flag for Isla de la Juventud (CU-99) 🏴󠁫󠁨󠀲󠀰󠁿 Flag for Svay Rieng (KH-20) 🏴󠁴󠁤󠁨󠁬󠁿 Flag for Hadjer-Lamis (TD-HL) 🏴󠁪󠁰󠀲󠀱󠁿 Flag for Gifu (JP-21) 🏴󠁬󠁶󠀰󠀴󠀱󠁿 Flag for Jelgava Municipality (LV-041) 🏴󠁰󠁫󠁴󠁡󠁿 Flag for Federally Administered Tribal Areas (PK-TA) 🏴󠁭󠁴󠀶󠀲󠁿 Flag for Xewkija (MT-62) 🏴󠁭󠁲󠀱󠀰󠁿 Flag for 
Guidimaka (MR-10) 🏴󠁭󠁫󠀰󠀲󠁿 Flag for Aračinovo (MK-02) 🏴󠁳󠁩󠀲󠀰󠀸󠁿 Flag for Log–Dragomer (SI-208) 🏴󠁳󠁩󠀱󠀲󠀵󠁿 Flag for Šmartno ob Paki (SI-125) 🏴󠁣󠁯󠁤󠁣󠁿 Flag for Capital District (CO-DC) 🏴󠁬󠁶󠀱󠀰󠀶󠁿 Flag for Ventspils Municipality (LV-106) 🏴󠁭󠁶󠁳󠁣󠁿 Flag for South Central Province (MV-SC) 🏴󠁩󠁮󠁡󠁳󠁿 Flag for Assam (IN-AS) 🏴󠁬󠁴󠀰󠀲󠁿 Flag for Alytus Municipality (LT-02) 🏴󠁶󠁮󠀶󠀶󠁿 Flag for Hưng Yên (VN-66) 👨🏻‍👨🏻‍👧🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👨🏼‍👨🏼‍👧🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁧󠁴󠁳󠁭󠁿 Flag for San Marcos (GT-SM) 👨🏼‍👨🏼‍👦🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁤󠁥󠁳󠁨󠁿 Flag for Schleswig-Holstein (DE-SH) 👨‍👨‍👶‍👧 Family: Man, Man, Baby, Girl ️ Variation Selector-16 👨🏽‍👨🏽‍👧🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👨🏾‍👧🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👧🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👨🏻‍👨🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone 👨🏼‍👨🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏽‍👨🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👨🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone 👩‍❤️‍👨🏿 Couple With Heart - Woman, Man: Dark Skin Tone 🏴󠁥󠁳󠁣󠁢󠁿 Flag for Cantabria (ES-CB) 🏴󠁳󠁳󠁵󠁹󠁿 Flag for Unity (SS-UY) 👩🏼‍👶🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👶🏽‍👦🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin 
Tone, Boy: Medium Skin Tone 👩🏾‍👶🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👶🏿‍👦🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👶🏻‍👧🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👶🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍👶🏽‍👧🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👶🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏽‍👶🏽‍👶🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👶🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👶🏿‍👶🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👨🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👨🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👨🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👨🏻‍👦🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👶🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏼‍👨🏼‍👦🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👨🏽‍👦🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👦🏿‍👦🏿 Family - Woman: Dark Skin Tone, 
Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 👩🏽‍👨🏽‍👦🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👦🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 👩🏼‍👨🏼‍👦🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👨🏽‍👦🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👦🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏻‍👨🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👨🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone 👩🏻‍👨🏻‍👦🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👦🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏼‍👨🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏻‍👨🏻‍👧🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👨🏼‍👧🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👨🏽‍👧🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👨🏾‍👧🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👧🏿‍👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👨🏻‍👧🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: 
Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👨🏽‍👧🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩🏿‍👨🏿‍👧🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👨🏻‍👧🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👧🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👨🏽‍👧🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👧🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👧🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👨🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏻‍👨🏻‍👶🏻‍👦🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👨🏼‍👶🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏾‍👨🏾‍👶🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👶🏿‍👦🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👨🏻‍👶🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👨🏼‍👶🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍👨🏽‍👶🏽‍👧🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏿‍👨🏿‍👶🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: 
Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👨🏻‍👶🏻‍👶🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👨🏼‍👶🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👨🏽‍👶🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👶🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👨🏿‍👶🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 👩🏼‍👩🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏻‍👩🏻‍👦🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👩🏼‍👦🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏾‍👩🏾‍👦🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👦🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 👩🏿‍👩🏿‍👦🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 👩🏽‍👩🏽‍👦🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👩🏾‍👦🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👦🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👦🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👩🏼‍👦🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin 
Tone 👩🏽‍👩🏽‍👦🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👦🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏻‍👩🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone 👩🏼‍👩🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍👩🏽‍👦🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏽‍👩🏽‍👧🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏻‍👩🏻‍👧🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👩🏽‍👧🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👩🏾‍👧🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👧🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👧🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👩🏼‍👧🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👩🏽‍👧🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👧🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👧🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone 👨🏾‍👩🏾‍👧🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: 
Medium-Dark Skin Tone 👩🏼‍👩🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👩🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍👩🏼‍👶🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏽‍👩🏽‍👶🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻‍👧🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👩🏽‍👩🏽‍👶🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👶🏻‍👶🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 👩🏼‍👩🏼‍👶🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👩🏽‍👩🏽‍👶🏽‍👶🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👩🏾‍👶🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: 
Medium-Dark Skin Tone 👩🏿‍👩🏿‍👶🏿‍👶🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 👩🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁩󠁤󠁭󠁬󠁿 Flag for Maluku Islands (ID-ML) 👩🏿‍👶🏿‍👧🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁤󠁫󠀸󠀳󠁿 Flag for Southern Denmark (DK-83) 🏴󠁭󠁫󠀸󠀵󠁿 Flag for Skopje (MK-85) 👨🏼‍❤️‍💋‍👩 Kiss - Man: Medium-Light Skin Tone, Woman 🏴󠁰󠁴󠀰󠀲󠁿 Flag for Beja (PT-02) 🏴󠁩󠁴󠀸󠀸󠁿 Flag for Sardinia (IT-88) 🏴󠁤󠁥󠁢󠁹󠁿 Flag for Bavaria (DE-BY) 🏴󠁰󠁧󠁥󠁢󠁲󠁿 Flag for East New Britain (PG-EBR) 🏴󠁩󠁴󠀳󠀲󠁿 Flag for Trentino-South Tyrol (IT-32) 🏴󠁵󠁳󠁴󠁮󠁿 Flag for Tennessee (US-TN) 🏴󠁣󠁡󠁳󠁫󠁿 Flag for Saskatchewan (CA-SK) 🏴󠁴󠁶󠁦󠁵󠁮󠁿 Flag for Funafuti (TV-FUN) 🏴󠁴󠁪󠁧󠁢󠁿 Flag for Gorno-Badakhshan (TJ-GB) 🏴󠁳󠁯󠁢󠁮󠁿 Flag for Banaadir (SO-BN) 🏴󠁳󠁩󠀱󠀰󠀰󠁿 Flag for Radenci (SI-100) 🏴󠁤󠁥󠁢󠁷󠁿 Flag for Baden-Württemberg (DE-BW) 👩🏿‍👧🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁶󠁥󠁧󠁿 Flag for Carabobo (VE-G) ‍ Zero Width Joiner 🏴󠁫󠁥󠀳󠀱󠁿 Flag for Nakuru (KE-31) 🏴󠁴󠁧󠁭󠁿 Flag for Maritime (TG-M) 🏴󠁮󠁧󠁢󠁯󠁿 Flag for Borno (NG-BO) 🏴󠁭󠁤󠁳󠁮󠁿 Flag for Transnistria (MD-SN) 🏴󠁩󠁲󠀰󠀷󠁿 Flag for Tehran (IR-07) 🏴󠁲󠁵󠁤󠁡󠁿 Flag for Dagestan (RU-DA) 🏴󠁯󠁭󠁷󠁵󠁿 Flag for Al Wusta (OM-WU) 🏴󠁣󠁺󠀴󠀲󠁿 Flag for Ústecký kraj (CZ-42) 🏴󠁭󠁹󠀱󠀴󠁿 Flag for Kuala Lumpur (MY-14) 🏴󠁰󠁥󠁡󠁹󠁡󠁿 Flag for Ayacucho (PE-AYA) 🏴󠁵󠁡󠀳󠀰󠁿 Flag for Kiev (UA-30) 🏴󠁡󠁧󠀰󠀸󠁿 Flag for Saint Philip (AG-08) 🏴󠁭󠁴󠀲󠀹󠁿 Flag for Mdina (MT-29) 🏴󠁧󠁢󠁮󠁩󠁲󠁿 Flag for Northern Ireland (GB-NIR) 🏴󠁦󠁲󠁡󠁲󠁡󠁿 Flag for Auvergne-Rhône-Alpes (FR-ARA) 🏴󠁭󠁸󠁤󠁵󠁲󠁿 Flag for Durango (MX-DUR) 👨🏼‍👩🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁬󠁫󠀵󠁿 Flag for Eastern (LK-5) 🏴󠁮󠁧󠁯󠁧󠁿 Flag for Ogun (NG-OG) 🏴󠁬󠁹󠁪󠁩󠁿 Flag for Jafara (LY-JI) 🏴󠁳󠁥󠁭󠁿 Flag for Skåne (SE-M) 👨🏽‍👩🏽‍👧🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍👩🏾‍👧🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark 
Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁢󠁲󠁭󠁳󠁿 Flag for Mato Grosso do Sul (BR-MS) 🏴󠁧󠁴󠁳󠁲󠁿 Flag for Santa Rosa (GT-SR) 👨🏼‍👩🏼‍👧🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁳󠁩󠀱󠀵󠀱󠁿 Flag for Braslovče (SI-151) 🏴󠁰󠁴󠀳󠀰󠁿 Flag for Madeira (PT-30) 🏴󠁳󠁶󠁳󠁶󠁿 Flag for San Vicente (SV-SV) 🏴󠁩󠁲󠀳󠀲󠁿 Flag for Alborz (IR-32) 🏴󠁷󠁳󠁦󠁡󠁿 Flag for Fa’asaleleaga (WS-FA) 👨🏼‍👨🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁣󠁡󠁮󠁬󠁿 Flag for Newfoundland and Labrador (CA-NL) 🏴󠁧󠁲󠁪󠁿 Flag for Peloponnese (GR-J) 🏴󠁮󠁬󠁳󠁸󠁿 Flag for Sint Maarten (NL-SX) 🏴󠁭󠁴󠀴󠀸󠁿 Flag for St. Julian’s (MT-48) 🏴󠁮󠁧󠁡󠁤󠁿 Flag for Adamawa (NG-AD) 👩🏿‍👩🏿‍👧🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁳󠁴󠁳󠁿 Flag for São Tomé (ST-S) 👩🏻‍👩🏻‍👧🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁬󠁶󠀰󠀱󠀰󠁿 Flag for Auce (LV-010) 🏴󠁰󠁨󠀱󠀵󠁿 Flag for Cordillera Administrative (PH-15) 🏴󠁪󠁰󠀱󠀸󠁿 Flag for Fukui (JP-18) 👨🏿‍👩🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁧󠁥󠁫󠁡󠁿 Flag for Kakheti (GE-KA) 🏴󠁫󠁲󠀴󠀹󠁿 Flag for Jeju (KR-49) 🏴󠁭󠁡󠀱󠀳󠁿 Flag for Souss-Massa-Drâa (MA-13) 🏴󠁬󠁶󠀰󠀳󠀷󠁿 Flag for Inčukalns (LV-037) 🏴󠁦󠁲󠁴󠁦󠁿 Flag for French Southern Territories (FR-TF) 🏴󠁭󠁸󠁲󠁯󠁯󠁿 Flag for Quintana Roo (MX-ROO) 👩🏻‍👶🏻‍👶🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 👨🏾‍👨🏾‍👦🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁨󠁵󠁧󠁳󠁿 Flag for Győr-Moson-Sopron (HU-GS) 👩🏿‍👩🏿‍👧🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone 👩🏻‍👩🏻‍👦🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone  Shibuya 👩‍❤️‍👨🏽 Couple With Heart - Woman, Man: Medium Skin Tone 🏴󠁷󠁳󠁧󠁩󠁿 Flag for Gaga’ifomauga (WS-GI) 🏴󠁨󠁴󠁮󠁥󠁿 Flag for Nord-Est (HT-NE) 
🏴󠁳󠁧󠀰󠀱󠁿 Flag for Central Singapore (SG-01) 🏴󠁥󠁣󠁴󠁿 Flag for Tungurahua (EC-T) # Number Sign 👨🏻‍👨🏻‍👶🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 1 Digit One 🏴󠁢󠁯󠁴󠁿 Flag for Tarija (BO-T) 👨🏾‍👩🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁢󠁩󠁣󠁩󠁿 Flag for Cibitoke (BI-CI) 🏴󠁭󠁶󠁵󠁳󠁿 Flag for Upper South Province (MV-US) 🏴󠁡󠁤󠀰󠀲󠁿 Flag for Canillo (AD-02) 🏴󠁡󠁦󠁢󠁡󠁭󠁿 Flag for Bamyan (AF-BAM) 🏴󠁡󠁤󠀰󠀳󠁿 Flag for Encamp (AD-03) 🏴󠁵󠁳󠁭󠁰󠁿 Flag for Northern Mariana Islands (US-MP) 🏴󠁬󠁶󠀰󠀱󠀲󠁿 Flag for Babīte (LV-012) 🏴󠁥󠁣󠁸󠁿 Flag for Cotopaxi (EC-X) 🏴󠁧󠁡󠀴󠁿 Flag for Ngounié (GA-4) * Asterisk 󠁺 Tag Latin Small Letter Z 🏴󠁡󠁤󠀰󠀴󠁿 Flag for La Massana (AD-04) 󠀳 Tag Digit Three 👩🏼‍❤️‍💋‍👩🏻 Kiss - Woman: Medium-Light Skin Tone, Woman: Light Skin Tone 🏴󠁭󠁥󠀰󠀳󠁿 Flag for Berane (ME-03) 👨🏿‍❤️‍💋‍👨🏽 Kiss - Man: Dark Skin Tone, Man: Medium Skin Tone 🏴󠁤󠁯󠀳󠀷󠁿 Flag for El Valle (DO-37) 👩🏾‍❤️‍👩🏻 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Light Skin Tone 🏴󠁫󠁥󠀰󠀱󠁿 Flag for Baringo (KE-01) 🏴󠁹󠁥󠁳󠁡󠁿 Flag for Amanat Al Asimah (YE-SA) 👨🏼‍👨🏼‍👶🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 󠀲 Tag Digit Two 🏴󠁭󠁴󠀲󠀰󠁿 Flag for Senglea (MT-20) 🕴️‍♀️ Woman in Business Suit Levitating 🏴󠁣󠁦󠁨󠁭󠁿 Flag for Haut-Mbomou (CF-HM) 󠀱 Tag Digit One 󠀴 Tag Digit Four 🏴󠁡󠁺󠁡󠁢󠁳󠁿 Flag for Absheron (AZ-ABS) 6 Digit Six 🏴󠁬󠁡󠁳󠁶󠁿 Flag for Savannakhet (LA-SV) 🏴󠁭󠁬󠀱󠁿 Flag for Kayes (ML-1) 🏴󠁡󠁥󠁡󠁺󠁿 Flag for Abu Dhabi (AE-AZ) 🏴󠁥󠁳󠁡󠁳󠁿 Flag for Asturias (ES-AS) 🏴󠁩󠁱󠁫󠁩󠁿 Flag for Kirkuk (IQ-KI) 👩‍❤️‍👩🏽 Couple With Heart - Woman, Woman: Medium Skin Tone 🏴󠁤󠁥󠁢󠁥󠁿 Flag for Berlin (DE-BE) 8 Digit Eight 🏴󠁡󠁤󠀰󠀸󠁿 Flag for Escaldes-Engordany (AD-08) 🏴󠁣󠁮󠀶󠀴󠁿 Flag for Ningxia (CN-64) 🏴󠁥󠁣󠁦󠁿 Flag for Cañar (EC-F) 🏴󠁡󠁥󠁡󠁪󠁿 Flag for Ajman (AE-AJ) 🕴🏻‍♀️ Woman in Business Suit Levitating: Light Skin Tone 👨🏻‍❤️‍💋‍👩 Kiss - Man: Light Skin Tone, Woman 󠀸 Tag Digit Eight 🏴󠁩󠁲󠀱󠀴󠁿 Flag for Fars (IR-14) 🏴󠁡󠁥󠁦󠁵󠁿 
Flag for Fujairah (AE-FU) 👨🏼‍👦🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁨󠁲󠀱󠀰󠁿 Flag for Virovitica-Podravina (HR-10) 󠁩 Tag Latin Small Letter I 7 Digit Seven 󠀷 Tag Digit Seven 󠁥 Tag Latin Small Letter E 👩🏼‍👩🏼‍👧🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁭󠁨󠁴󠁿 Flag for Ratak Chain (MH-T) 🏴󠁡󠁥󠁳󠁨󠁿 Flag for Sharjah (AE-SH) 󠁦 Tag Latin Small Letter F 🏴󠁬󠁴󠀵󠀷󠁿 Flag for Vilniaus Municipality (LT-57) 🏴󠁩󠁳󠀴󠁿 Flag for Westfjords (IS-4) 🏴󠁣󠁡󠁢󠁣󠁿 Flag for British Columbia (CA-BC) 4 Digit Four 🏴󠁡󠁦󠁢󠁡󠁬󠁿 Flag for Balkh (AF-BAL) 👨‍👶‍👦 Family: Man, Baby, Boy 🏴󠁴󠁷󠁨󠁳󠁱󠁿 Flag for Hsinchu County (TW-HSQ) 👩‍👶‍👧 Family: Woman, Baby, Girl 🏴󠁭󠁸󠁪󠁡󠁬󠁿 Flag for Jalisco (MX-JAL) 🏴󠁫󠁥󠀱󠀸󠁿 Flag for Kitui (KE-18) 🏴󠁰󠁴󠀲󠀰󠁿 Flag for Azores (PT-20) 🏴󠁩󠁮󠁭󠁮󠁿 Flag for Manipur (IN-MN) 🏴󠁡󠁦󠁢󠁤󠁳󠁿 Flag for Badakhshan (AF-BDS) 👩🏻‍❤️‍👩🏼 Couple With Heart - Woman: Light Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁡󠁤󠀰󠀵󠁿 Flag for Ordino (AD-05) 👩🏽‍❤️‍💋‍👩 Kiss - Woman: Medium Skin Tone, Woman 🏴󠁡󠁦󠁢󠁧󠁬󠁿 Flag for Baghlan (AF-BGL) 🏴󠁮󠁧󠁣󠁲󠁿 Flag for Cross River (NG-CR) 🏴󠁵󠁳󠁣󠁯󠁿 Flag for Colorado (US-CO) 󠁴 Tag Latin Small Letter T 🏴󠁭󠁫󠀶󠀴󠁿 Flag for Radoviš (MK-64) 🏴󠁮󠁺󠁷󠁧󠁮󠁿 Flag for Wellington (NZ-WGN) 👨🏽‍👨🏽‍👶🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁩󠁲󠀱󠀶󠁿 Flag for Kurdistan (IR-16) 👨🏽‍❤️‍💋‍👨🏿 Kiss - Man: Medium Skin Tone, Man: Dark Skin Tone 󠁳 Tag Latin Small Letter S 👩‍👶‍👶 Family: Woman, Baby, Baby 🏴󠁡󠁦󠁤󠁡󠁹󠁿 Flag for Daykundi (AF-DAY) 👨🏻‍❤️‍💋‍👨🏾 Kiss - Man: Light Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁡󠁦󠁦󠁲󠁡󠁿 Flag for Farah (AF-FRA) 󠁱 Tag Latin Small Letter Q 🏴󠁧󠁴󠁧󠁵󠁿 Flag for Guatemala (GT-GU) 🏴󠁣󠁨󠁴󠁧󠁿 Flag for Thurgau (CH-TG) 🏴󠁲󠁵󠁣󠁥󠁿 Flag for Chechen (RU-CE) 󠀵 Tag Digit Five 🏴󠁡󠁦󠁧󠁨󠁯󠁿 Flag for Ghōr (AF-GHO) 🏴󠁡󠁴󠀹󠁿 Flag for Vienna (AT-9) 🏴󠁡󠁦󠁧󠁨󠁡󠁿 Flag for Ghazni (AF-GHA) 󠁵 Tag Latin Small Letter U 🏴󠁢󠁷󠁧󠁡󠁿 Flag for Gaborone (BW-GA) 󠁹 Tag Latin 
Small Letter Y 󠁿 Cancel Tag 󠁷 Tag Latin Small Letter W 👩🏽‍❤️‍👩🏿 Couple With Heart - Woman: Medium Skin Tone, Woman: Dark Skin Tone 🏴󠁣󠁯󠁡󠁭󠁡󠁿 Flag for Amazonas (CO-AMA) 󠁮 Tag Latin Small Letter N 👩‍❤️‍💋‍👩🏽 Kiss - Woman, Woman: Medium Skin Tone 👨‍👶 Family: Man, Baby 🏴󠁡󠁴󠀱󠁿 Flag for Burgenland (AT-1) 🏴󠁡󠁦󠁨󠁥󠁬󠁿 Flag for Helmand (AF-HEL) 󠀶 Tag Digit Six 🏴󠁡󠁦󠁪󠁯󠁷󠁿 Flag for Jowzjan (AF-JOW) 🧕‍♀️ Woman With Headscarf 󠁢 Tag Latin Small Letter B 󠀰 Tag Digit Zero 🏴󠁡󠁦󠁨󠁥󠁲󠁿 Flag for Herat (AF-HER) 🏴󠁧󠁤󠀰󠀵󠁿 Flag for Saint Mark (GD-05) 3 Digit Three 󠁧 Tag Latin Small Letter G 🕴🏾‍♀️ Woman in Business Suit Levitating: Medium-Dark Skin Tone 👩🏽‍❤️‍💋‍👨🏽 Kiss - Woman: Medium Skin Tone, Man: Medium Skin Tone 🏴󠁵󠁳󠁡󠁫󠁿 Flag for Alaska (US-AK) 󠁲 Tag Latin Small Letter R 🏴󠁴󠁬󠁬󠁡󠁿 Flag for Lautém (TL-LA) 🏴󠁡󠁦󠁫󠁡󠁢󠁿 Flag for Kabul (AF-KAB) 👨‍❤️‍💋‍👨🏿 Kiss - Man, Man: Dark Skin Tone 🧕‍♂️ Man With Headscarf 󠁶 Tag Latin Small Letter V 󠁤 Tag Latin Small Letter D 🏴󠁡󠁦󠁫󠁡󠁮󠁿 Flag for Kandahar (AF-KAN) 🏴󠁡󠁦󠁫󠁡󠁰󠁿 Flag for Kapisa (AF-KAP) 🏴󠁭󠁣󠁳󠁲󠁿 Flag for Saint Roman (MC-SR) 🏴󠁥󠁥󠀳󠀹󠁿 Flag for Hiiu (EE-39) 󠁭 Tag Latin Small Letter M 🏴󠁡󠁦󠁫󠁨󠁯󠁿 Flag for Khost (AF-KHO) 🧕🏻‍♂️ Man With Headscarf: Light Skin Tone 🏴󠁡󠁦󠁫󠁤󠁺󠁿 Flag for Kunduz (AF-KDZ) 👩🏿‍❤️‍👨 Couple With Heart - Woman: Dark Skin Tone, Man 🏴󠁵󠁳󠁳󠁤󠁿 Flag for South Dakota (US-SD) 🏴󠁡󠁦󠁢󠁤󠁧󠁿 Flag for Badghis (AF-BDG) 🏴󠁩󠁳󠀸󠁿 Flag for Southern (IS-8) 🏴󠁡󠁦󠁫󠁮󠁲󠁿 Flag for Kunar (AF-KNR) 👨‍👨‍👶‍👶 Family: Man, Man, Baby, Baby 🏴󠁪󠁰󠀱󠀳󠁿 Flag for Tokyo (JP-13) 🏴󠁡󠁦󠁬󠁡󠁧󠁿 Flag for Laghman (AF-LAG) 🧕🏽‍♂️ Man With Headscarf: Medium Skin Tone 🏴󠁡󠁦󠁬󠁯󠁧󠁿 Flag for Logar (AF-LOG) 5 Digit Five 󠁣 Tag Latin Small Letter C 🏴󠁡󠁦󠁦󠁹󠁢󠁿 Flag for Faryab (AF-FYB) 󠁰 Tag Latin Small Letter P 🏴󠁡󠁦󠁮󠁡󠁮󠁿 Flag for Nangarhar (AF-NAN) 󠀹 Tag Digit Nine 🏴󠁥󠁳󠁮󠁣󠁿 Flag for Navarra Chartered Community (ES-NC) 👩🏼‍👦🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁭󠁸󠁮󠁡󠁹󠁿 Flag for Nayarit (MX-NAY) 🏴󠁢󠁲󠁰󠁥󠁿 Flag for Pernambuco (BR-PE) 🏴󠁩󠁴󠀷󠀲󠁿 Flag for Campania (IT-72) 🧕🏾‍♂️ Man With Headscarf: 
Medium-Dark Skin Tone 👩🏽‍❤️‍💋‍👩🏾 Kiss - Woman: Medium Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁡󠁦󠁮󠁵󠁲󠁿 Flag for Nuristan (AF-NUR) 👨‍👨‍👧‍👶 Family: Man, Man, Girl, Baby 🏴󠁰󠁧󠁷󠁢󠁫󠁿 Flag for West New Britain (PG-WBK) 👨🏼‍👩🏼‍👧🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁧󠁹󠁵󠁤󠁿 Flag for Upper Demerara-Berbice (GY-UD) 👨‍❤️‍💋‍👩 Kiss - Man, Woman 🏴󠁥󠁴󠁡󠁦󠁿 Flag for Afar (ET-AF) 🏴󠁡󠁦󠁰󠁡󠁲󠁿 Flag for Parwan (AF-PAR) 🏴󠁡󠁦󠁮󠁩󠁭󠁿 Flag for Nimruz (AF-NIM) 🏴󠁨󠁲󠀰󠀴󠁿 Flag for Karlovac (HR-04) 🏴󠁡󠁦󠁰󠁩󠁡󠁿 Flag for Paktia (AF-PIA) 🧕🏿‍♂️ Man With Headscarf: Dark Skin Tone 🧕🏼‍♂️ Man With Headscarf: Medium-Light Skin Tone 🏴󠁭󠁸󠁢󠁣󠁮󠁿 Flag for Baja California (MX-BCN) 🏴󠁡󠁦󠁰󠁫󠁡󠁿 Flag for Paktika (AF-PKA) 🏴󠁫󠁩󠁰󠁿 Flag for Phoenix Islands (KI-P) 󠁯 Tag Latin Small Letter O 🏴󠁡󠁦󠁰󠁡󠁮󠁿 Flag for Panjshir (AF-PAN) 🏴󠁣󠁨󠁴󠁩󠁿 Flag for Ticino (CH-TI) 🏴󠁳󠁩󠀱󠀹󠀲󠁿 Flag for Žirovnica (SI-192) 🏴󠁳󠁥󠁮󠁿 Flag for Halland (SE-N) 󠁪 Tag Latin Small Letter J 👩🏽‍❤️‍💋‍👩🏻 Kiss - Woman: Medium Skin Tone, Woman: Light Skin Tone 👨🏾‍👨🏾‍👶🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👶🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁳󠁳󠁢󠁮󠁿 Flag for Northern Bahr el Ghazal (SS-BN) 👨🏽‍❤️‍💋‍👩 Kiss - Man: Medium Skin Tone, Woman 🏴󠁣󠁦󠁢󠁫󠁿 Flag for Basse-Kotto (CF-BK) 👨‍❤️‍👨🏻 Couple With Heart - Man, Man: Light Skin Tone 👨🏽‍❤️‍👨 Couple With Heart - Man: Medium Skin Tone, Man 🏴󠁬󠁹󠁢󠁵󠁿 Flag for Butnan (LY-BU) 👩‍👶 Family: Woman, Baby 🏴󠁬󠁫󠀹󠁿 Flag for Sabaragamuwa (LK-9) 🏴󠁡󠁦󠁳󠁡󠁭󠁿 Flag for Samangan (AF-SAM) 🏴󠁴󠁶󠁮󠁫󠁬󠁿 Flag for Nukulaelae (TV-NKL) 🏴󠁡󠁥󠁲󠁫󠁿 Flag for Ras al-Khaimah (AE-RK) 🏴󠁥󠁳󠁣󠁥󠁿 Flag for Ceuta (ES-CE) 🏴󠁡󠁥󠁤󠁵󠁿 Flag for Dubai (AE-DU) 👨🏻‍👨🏻‍👶🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 🏴󠁪󠁰󠀴󠀷󠁿 Flag for Okinawa (JP-47) 🏴󠁡󠁦󠁳󠁡󠁲󠁿 Flag for Sar-e Pol (AF-SAR) 👩🏼‍👩🏼‍👦🏼‍👧🏼 Family - Woman: Medium-Light 
Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 󠁬 Tag Latin Small Letter L 🏴󠁡󠁦󠁵󠁲󠁵󠁿 Flag for Urozgan (AF-URU) 9 Digit Nine 👩🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨‍❤️‍💋‍👨🏽 Kiss - Man, Man: Medium Skin Tone 🏴󠁤󠁭󠀰󠀶󠁿 Flag for Saint Joseph (DM-06) 🏴󠁡󠁧󠀰󠀴󠁿 Flag for Saint John (AG-04) 🏴󠁣󠁯󠁶󠁩󠁤󠁿 Flag for Vichada (CO-VID) 🏴󠁰󠁷󠀲󠀱󠀸󠁿 Flag for Ngarchelong (PW-218) 🏴󠁲󠁵󠁡󠁲󠁫󠁿 Flag for Arkhangelsk (RU-ARK) 🏴󠁡󠁦󠁺󠁡󠁢󠁿 Flag for Zabul (AF-ZAB) 🏴󠁡󠁧󠀰󠀳󠁿 Flag for Saint George (AG-03) 🏴󠁩󠁴󠀲󠀵󠁿 Flag for Lombardy (IT-25) 👨🏻‍❤️‍💋‍👨🏻 Kiss - Man: Light Skin Tone, Man: Light Skin Tone 🏴󠁣󠁺󠀵󠀳󠁿 Flag for Pardubický kraj (CZ-53) 🏴󠁡󠁧󠀰󠀶󠁿 Flag for Saint Paul (AG-06) 🏴󠁶󠁮󠀵󠀱󠁿 Flag for Trà Vinh (VN-51) 👩‍👨‍👶‍👧 Family: Woman, Man, Baby, Girl 🏴󠁫󠁲󠀴󠀸󠁿 Flag for South Gyeongsang (KR-48) 🏴󠁡󠁧󠀰󠀵󠁿 Flag for Saint Mary (AG-05) 🏴󠁧󠁲󠁫󠁿 Flag for North Aegean (GR-K) 👩‍👩‍👶‍👧 Family: Woman, Woman, Baby, Girl 🏴󠁥󠁣󠁺󠁿 Flag for Zamora-Chinchipe (EC-Z) 🏴󠁮󠁩󠁭󠁳󠁿 Flag for Masaya (NI-MS) 🏴󠁫󠁩󠁧󠁿 Flag for Gilbert Islands (KI-G) 🏴󠁭󠁸󠁣󠁨󠁨󠁿 Flag for Chihuahua (MX-CHH) 👨🏼‍👨🏼‍👶🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👨🏽‍👶🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 👩🏽‍👧🏽‍👧🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 👩‍👨‍👶‍👶 Family: Woman, Man, Baby, Baby 🏴󠁡󠁧󠀱󠀱󠁿 Flag for Redonda (AG-11) 👩‍👩‍👶 Family: Woman, Woman, Baby 👨‍❤️‍💋‍👩🏻 Kiss - Man, Woman: Light Skin Tone 👨‍❤️‍💋‍👨🏾 Kiss - Man, Man: Medium-Dark Skin Tone 🏴󠁡󠁬󠀰󠀱󠁿 Flag for Berat County (AL-01) 󠁡 Tag Latin Small Letter A 🏴󠁡󠁧󠀱󠀰󠁿 Flag for Barbuda (AG-10) 🏴󠁣󠁯󠁳󠁡󠁰󠁿 Flag for San Andrés & Providencia (CO-SAP) 🏴󠁡󠁬󠀰󠀳󠁿 Flag for Elbasan County (AL-03) 👨🏾‍👨🏾‍👶🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👦🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin 
Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁩󠁮󠁫󠁡󠁿 Flag for Karnataka (IN-KA) 🏴󠁡󠁬󠀰󠀵󠁿 Flag for Gjirokastër County (AL-05) 🏴󠁪󠁰󠀰󠀱󠁿 Flag for Hokkaidō (JP-01) 👩🏾‍👨🏾‍👶🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁵󠁧󠁣󠁿 Flag for Central (UG-C) 👨🏼‍❤️‍💋‍👨 Kiss - Man: Medium-Light Skin Tone, Man 🏴󠁡󠁬󠀰󠀲󠁿 Flag for Durrës County (AL-02) 🏴󠁡󠁬󠀰󠀴󠁿 Flag for Fier County (AL-04) 🏴󠁡󠁬󠀰󠀶󠁿 Flag for Korçë County (AL-06) 🏴󠁰󠁹󠀱󠀶󠁿 Flag for Alto Paraguay (PY-16) 🏴󠁡󠁬󠀰󠀷󠁿 Flag for Kukës County (AL-07) 👨🏿‍❤️‍💋‍👨 Kiss - Man: Dark Skin Tone, Man 🏴󠁧󠁹󠁵󠁴󠁿 Flag for Upper Takutu-Upper Essequibo (GY-UT) 👨🏾‍👶🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏿‍👨🏿‍👶🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 👨🏻‍👨🏻‍👶🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 🏴󠁡󠁬󠀰󠀹󠁿 Flag for Dibër County (AL-09) 🏴󠁡󠁬󠀰󠀸󠁿 Flag for Lezhë County (AL-08) 👨🏼‍👨🏼‍👶🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁡󠁬󠀱󠀱󠁿 Flag for Tirana County (AL-11) 🏴󠁡󠁤󠀰󠀶󠁿 Flag for Sant Julià de Lòria (AD-06) 🏴󠁢󠁲󠁢󠁡󠁿 Flag for Bahia (BR-BA) 🏴󠁡󠁬󠀱󠀰󠁿 Flag for Shkodër County (AL-10) 👩‍❤️‍💋‍👨🏿 Kiss - Woman, Man: Dark Skin Tone 👨🏽‍👨🏽‍👶🏽‍👶🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👨🏾‍👶🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👩‍❤️‍💋‍👨🏽 Kiss - Woman, Man: Medium Skin Tone 🏴󠁡󠁬󠀱󠀲󠁿 Flag for Vlorë County (AL-12) 🏴󠁴󠁨󠀲󠀳󠁿 Flag for Trat (TH-23) 🏴󠁡󠁭󠁧󠁲󠁿 Flag for Gegharkunik (AM-GR) 👨🏿‍👨🏿‍👶🏿‍👶🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁡󠁭󠁡󠁧󠁿 Flag for Aragatsotn (AM-AG) 🏴󠁡󠁭󠁡󠁲󠁿 Flag for Ararat (AM-AR) 🏴󠁡󠁭󠁥󠁲󠁿 Flag for Yerevan (AM-ER) 🏴󠁡󠁭󠁫󠁴󠁿 Flag for 
Kotayk (AM-KT) 🏴󠁦󠁲󠁣󠁯󠁲󠁿 Flag for Corse (FR-COR) 🏴󠁡󠁭󠁡󠁶󠁿 Flag for Armavir (AM-AV) 👩‍❤️‍💋‍👩🏿 Kiss - Woman, Woman: Dark Skin Tone 🏴󠁢󠁲󠁭󠁧󠁿 Flag for Minas Gerais (BR-MG) 🏴󠁣󠁧󠀱󠀶󠁿 Flag for Pointe-Noire (CG-16) 🏴󠁡󠁭󠁬󠁯󠁿 Flag for Lori (AM-LO) 🏴󠁤󠁺󠀲󠀱󠁿 Flag for Skikda (DZ-21) 🏴󠁡󠁭󠁳󠁨󠁿 Flag for Shirak (AM-SH) 👩‍❤️‍💋‍👩🏾 Kiss - Woman, Woman: Medium-Dark Skin Tone 🏴󠁡󠁤󠀰󠀷󠁿 Flag for Andorra la Vella (AD-07) 🏴󠁲󠁵󠁡󠁬󠁴󠁿 Flag for Altai Krai (RU-ALT) 🏴󠁳󠁩󠀱󠀶󠀷󠁿 Flag for Lovrenc na Pohorju (SI-167) 👩‍❤️‍💋‍👩🏼 Kiss - Woman, Woman: Medium-Light Skin Tone 👨🏿‍❤️‍💋‍👩🏻 Kiss - Man: Dark Skin Tone, Woman: Light Skin Tone 🏴󠁬󠁴󠁰󠁮󠁿 Flag for Panevėžys County (LT-PN) 🏴󠁤󠁯󠀳󠀵󠁿 Flag for Cibao Norte (DO-35) 🏴󠁮󠁯󠀱󠀰󠁿 Flag for Vest-Agder (NO-10) 👨‍❤️‍💋‍👩🏿 Kiss - Man, Woman: Dark Skin Tone 🏴󠁡󠁭󠁶󠁤󠁿 Flag for Vayots Dzor (AM-VD) 👩🏻‍❤️‍💋‍👩🏻 Kiss - Woman: Light Skin Tone, Woman: Light Skin Tone 🏴󠁵󠁳󠁶󠁴󠁿 Flag for Vermont (US-VT) 👨🏽‍❤️‍💋‍👨 Kiss - Man: Medium Skin Tone, Man 🏴󠁡󠁯󠁢󠁧󠁯󠁿 Flag for Bengo (AO-BGO) 👩🏻‍❤️‍💋‍👩 Kiss - Woman: Light Skin Tone, Woman 🏴󠁣󠁯󠁭󠁥󠁴󠁿 Flag for Meta (CO-MET) 🏴󠁮󠁬󠁢󠁱󠀲󠁿 Flag for Saba (NL-BQ2) 👩🏽‍❤️‍💋‍👩🏼 Kiss - Woman: Medium Skin Tone, Woman: Medium-Light Skin Tone 👨🏽‍👩🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁡󠁯󠁢󠁧󠁵󠁿 Flag for Benguela (AO-BGU) 🏴󠁣󠁯󠁳󠁵󠁣󠁿 Flag for Sucre (CO-SUC) 🏴󠁡󠁯󠁣󠁣󠁵󠁿 Flag for Cuando Cubango (AO-CCU) 🏴󠁰󠁥󠁭󠁤󠁤󠁿 Flag for Madre de Dios (PE-MDD) 🏴󠁣󠁨󠁶󠁤󠁿 Flag for Vaud (CH-VD) 🏴󠁡󠁯󠁢󠁩󠁥󠁿 Flag for Bié (AO-BIE) 🏴󠁡󠁯󠁣󠁡󠁢󠁿 Flag for Cabinda (AO-CAB) 🏴󠁡󠁯󠁨󠁵󠁩󠁿 Flag for Huíla (AO-HUI) 🏴󠁡󠁯󠁣󠁵󠁳󠁿 Flag for Cuanza Sul (AO-CUS) 👨‍❤️‍💋‍👩🏽 Kiss - Man, Woman: Medium Skin Tone 👩‍👩‍👦‍👶 Family: Woman, Woman, Boy, Baby 🏴󠁡󠁯󠁨󠁵󠁡󠁿 Flag for Huambo (AO-HUA) 👨🏼‍❤️‍👩🏾 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁣󠁹󠀰󠀶󠁿 Flag for Kyrenia (CY-06) 👩🏼‍❤️‍💋‍👨🏻 Kiss - Woman: Medium-Light Skin Tone, Man: Light Skin Tone 🏴󠁡󠁥󠁵󠁱󠁿 Flag for Umm al-Quwain (AE-UQ) 🏴󠁡󠁯󠁬󠁳󠁵󠁿 Flag for Lunda Sul (AO-LSU) 🏴󠁬󠁲󠁣󠁭󠁿 Flag for Grand Cape Mount (LR-CM) 🏴󠁡󠁯󠁬󠁮󠁯󠁿 Flag for Lunda Norte (AO-LNO) 👩🏽‍❤️‍👨🏿 
Couple With Heart - Woman: Medium Skin Tone, Man: Dark Skin Tone 👨🏾‍❤️‍👩🏾 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁡󠁯󠁣󠁮󠁯󠁿 Flag for Cuanza Norte (AO-CNO) 🏴󠁡󠁯󠁭󠁡󠁬󠁿 Flag for Malanje (AO-MAL) 👩🏼‍❤️‍💋‍👩 Kiss - Woman: Medium-Light Skin Tone, Woman 👨🏼‍👩🏼‍👦🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁡󠁯󠁭󠁯󠁸󠁿 Flag for Moxico (AO-MOX) 🏴󠁡󠁯󠁮󠁡󠁭󠁿 Flag for Namibe (AO-NAM) 👨🏾‍👩🏾‍👦🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 󠁫 Tag Latin Small Letter K 🕴🏼‍♀️ Woman in Business Suit Levitating: Medium-Light Skin Tone 🏴󠁡󠁲󠁡󠁿 Flag for Salta (AR-A) 👨🏾‍👩🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁣󠁤󠁬󠁵󠁿 Flag for Lualaba (CD-LU) 🏴󠁡󠁲󠁢󠁿 Flag for Buenos Aires Province (AR-B) 👨🏿‍👩🏿‍👦🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁡󠁲󠁤󠁿 Flag for San Luis (AR-D) 🏴󠁡󠁯󠁺󠁡󠁩󠁿 Flag for Zaire (AO-ZAI) 🏴󠁴󠁲󠀰󠀳󠁿 Flag for Afyonkarahisar (TR-03) 0 Digit Zero 🏴󠁶󠁮󠀲󠀵󠁿 Flag for Quảng Trị (VN-25) 🕴🏿‍♀️ Woman in Business Suit Levitating: Dark Skin Tone 🏴󠁡󠁯󠁵󠁩󠁧󠁿 Flag for Uíge (AO-UIG) 👩🏾‍👧🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁵󠁡󠀱󠀸󠁿 Flag for Zhytomyrshchyna (UA-18) 👨🏾‍❤️‍💋‍👨🏽 Kiss - Man: Medium-Dark Skin Tone, Man: Medium Skin Tone 🏴󠁣󠁯󠁣󠁥󠁳󠁿 Flag for Cesar (CO-CES) 🏴󠁡󠁭󠁳󠁵󠁿 Flag for Syunik (AM-SU) 🏴󠁡󠁲󠁥󠁿 Flag for Entre Ríos (AR-E) 👨🏿‍❤️‍💋‍👩 Kiss - Man: Dark Skin Tone, Woman 🏴󠁡󠁲󠁦󠁿 Flag for La Rioja (AR-F) 🏴󠁫󠁺󠁶󠁯󠁳󠁿 Flag for East Kazakhstan (KZ-VOS) 🏴󠁡󠁦󠁷󠁡󠁲󠁿 Flag for Maidan Wardak (AF-WAR) 🏴󠁡󠁲󠁪󠁿 Flag for San Juan (AR-J) 👩🏾‍👩🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁡󠁯󠁬󠁵󠁡󠁿 Flag for Luanda (AO-LUA) 🏴󠁡󠁲󠁬󠁿 Flag for La Pampa (AR-L) 👩🏼‍❤️‍💋‍👩🏽 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium Skin Tone 
👨🏼‍👩🏼‍👦🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏼‍👩🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁡󠁲󠁫󠁿 Flag for Catamarca (AR-K) 🏴󠁡󠁲󠁲󠁿 Flag for Río Negro (AR-R) 🏴󠁡󠁲󠁨󠁿 Flag for Chaco (AR-H) 🏴󠁡󠁲󠁰󠁿 Flag for Formosa (AR-P) 🏴󠁡󠁲󠁭󠁿 Flag for Mendoza (AR-M) 🏴󠁡󠁲󠁮󠁿 Flag for Misiones (AR-N) 🏴󠁡󠁲󠁱󠁿 Flag for Neuquén (AR-Q) 👨🏽‍👩🏽‍👦🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁡󠁲󠁴󠁿 Flag for Tucumán (AR-T) 🏴󠁡󠁲󠁳󠁿 Flag for Santa Fe (AR-S) 🏴󠁡󠁲󠁷󠁿 Flag for Corrientes (AR-W) 🏴󠁡󠁲󠁹󠁿 Flag for Jujuy (AR-Y) 🏴󠁡󠁲󠁶󠁿 Flag for Tierra del Fuego (AR-V) 🏴󠁡󠁲󠁵󠁿 Flag for Chubut (AR-U) 🏴󠁡󠁲󠁸󠁿 Flag for Córdoba (AR-X) 🏴󠁡󠁲󠁺󠁿 Flag for Santa Cruz (AR-Z) 🏴󠁡󠁲󠁧󠁿 Flag for Santiago del Estero (AR-G) 🏴󠁡󠁴󠀲󠁿 Flag for Carinthia (AT-2) 🏴󠁣󠁨󠁢󠁬󠁿 Flag for Basel-Landschaft (CH-BL) 👩🏿‍👧🏿‍👧🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👨🏻‍👩🏻‍👦🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👩🏻‍👧🏻‍👶🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👨‍👨‍👦‍👧 Family: Man, Man, Boy, Girl 🏴󠁡󠁴󠀳󠁿 Flag for Lower Austria (AT-3) 👩‍👶‍👦 Family: Woman, Baby, Boy 🏴󠁭󠁲󠀱󠀳󠁿 Flag for Nouakchott Ouest (MR-13) 👨🏼‍👩🏼‍👦🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁣󠁦󠁭󠁢󠁿 Flag for Mbomou (CF-MB) 🏴󠁡󠁴󠀶󠁿 Flag for Styria (AT-6) 🏴󠁰󠁨󠀰󠀱󠁿 Flag for Ilocos (PH-01) 🏴󠁡󠁴󠀷󠁿 Flag for Tyrol (AT-7) 🏴󠁣󠁮󠀵󠀲󠁿 Flag for Guizhou (CN-52) 🏴󠁬󠁡󠁸󠁳󠁿 Flag for Xaisomboun (LA-XS) 🏴󠁡󠁴󠀸󠁿 Flag for Vorarlberg (AT-8) 👨🏼‍👨🏼‍👦🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁡󠁴󠀵󠁿 Flag for Salzburg (AT-5) 👨🏿‍👩🏿‍👦🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 
👩‍👩‍👶‍👶 Family: Woman, Woman, Baby, Baby 👩‍👨‍👧‍👦 Family: Woman, Man, Girl, Boy 👩‍👨‍👧 Family: Woman, Man, Girl 👩‍👦‍👶 Family: Woman, Boy, Baby 🏴󠁡󠁵󠁮󠁳󠁷󠁿 Flag for New South Wales (AU-NSW) 👩‍👨‍👧‍👶 Family: Woman, Man, Girl, Baby 👩🏽‍👧🏽‍👶🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁡󠁵󠁮󠁴󠁿 Flag for Northern Territory (AU-NT) 👩🏿‍👧🏿‍👦🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁡󠁵󠁱󠁬󠁤󠁿 Flag for Queensland (AU-QLD) 2 Digit Two 👩‍👨‍👧‍👧 Family: Woman, Man, Girl, Girl 👩🏼‍👧🏼‍👶🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁡󠁴󠀴󠁿 Flag for Upper Austria (AT-4) 🏴󠁧󠁲󠁡󠁿 Flag for East Macedonia and Thrace (GR-A) 👨🏽‍👩🏽‍👦🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👨🏾‍👩🏾‍👦🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨‍👶‍👧 Family: Man, Baby, Girl 👨🏻‍👩🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone 👨🏿‍👩🏿‍👦🏿‍👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 👩‍👨‍👶 Family: Woman, Man, Baby 🏴󠁵󠁳󠁮󠁥󠁿 Flag for Nebraska (US-NE) 🏴󠁡󠁺󠁡󠁧󠁡󠁿 Flag for Agstafa (AZ-AGA) 🏴󠁡󠁦󠁴󠁡󠁫󠁿 Flag for Takhar (AF-TAK) 🏴󠁡󠁵󠁷󠁡󠁿 Flag for Western Australia (AU-WA) 🏴󠁡󠁺󠁡󠁧󠁣󠁿 Flag for Aghjabadi (AZ-AGC) 🏴󠁡󠁺󠁡󠁳󠁴󠁿 Flag for Astara (AZ-AST) 🏴󠁡󠁺󠁢󠁡󠁬󠁿 Flag for Balakan (AZ-BAL) 👩‍❤️‍💋‍👨🏼 Kiss - Woman, Man: Medium-Light Skin Tone 🏴󠁵󠁳󠁣󠁡󠁿 Flag for California (US-CA) 🏴󠁡󠁺󠁡󠁧󠁳󠁿 Flag for Agdash (AZ-AGS) 🏴󠁡󠁺󠁢󠁡󠁿 Flag for Baku (AZ-BA) 👨🏻‍❤️‍💋‍👩🏿 Kiss - Man: Light Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁵󠁶󠁩󠁣󠁿 Flag for Victoria (AU-VIC) 🏴󠁡󠁺󠁡󠁧󠁭󠁿 Flag for Agdam (AZ-AGM) 👨🏻‍👧🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone 🏴󠁡󠁺󠁢󠁡󠁲󠁿 Flag for Barda (AZ-BAR) 👨🏽‍👩🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone 👩🏾‍👧🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin 
Tone, Baby: Medium-Dark Skin Tone 🏴󠁡󠁺󠁡󠁧󠁵󠁿 Flag for Agsu (AZ-AGU) 🏴󠁣󠁤󠁴󠁡󠁿 Flag for Tanganyika (CD-TA) 👩🏻‍❤️‍👨🏼 Couple With Heart - Woman: Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁡󠁺󠁢󠁩󠁬󠁿 Flag for Bilasuvar (AZ-BIL) 🏴󠁡󠁺󠁣󠁡󠁬󠁿 Flag for Jalilabad (AZ-CAL) 🏴󠁡󠁺󠁣󠁡󠁢󠁿 Flag for Jabrayil (AZ-CAB) 🏴󠁡󠁺󠁢󠁥󠁹󠁿 Flag for Beylagan (AZ-BEY) 🏴󠁳󠁩󠀰󠀸󠀵󠁿 Flag for Novo Mesto (SI-085) 🏴󠁣󠁧󠀹󠁿 Flag for Niari (CG-9) 🏴󠁡󠁺󠁤󠁡󠁳󠁿 Flag for Dashkasan (AZ-DAS) 🏴󠁡󠁺󠁦󠁵󠁺󠁿 Flag for Fizuli (AZ-FUZ) 👩🏿‍❤️‍💋‍👨🏽 Kiss - Woman: Dark Skin Tone, Man: Medium Skin Tone 👨🏿‍❤️‍👨🏾 Couple With Heart - Man: Dark Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁡󠁺󠁧󠁯󠁹󠁿 Flag for Goychay (AZ-GOY) 🏴󠁡󠁺󠁧󠁯󠁲󠁿 Flag for Goranboy (AZ-GOR) 🏴󠁡󠁺󠁧󠁡󠁿 Flag for Ganja (AZ-GA) 🏴󠁱󠁡󠁵󠁳󠁿 Flag for Umm Salal (QA-US) 🏴󠁦󠁪󠁥󠁿 Flag for Eastern (FJ-E) 🏴󠁡󠁺󠁧󠁹󠁧󠁿 Flag for Goygol (AZ-GYG) 🏴󠁡󠁺󠁨󠁡󠁣󠁿 Flag for Hajigabul (AZ-HAC) 👩🏿‍❤️‍💋‍👩 Kiss - Woman: Dark Skin Tone, Woman 🏴󠁬󠁶󠀰󠀷󠀷󠁿 Flag for Rēzekne Municipality (LV-077) 🏴󠁡󠁵󠁡󠁣󠁴󠁿 Flag for Australian Capital Territory (AU-ACT) 👨🏽‍❤️‍💋‍👩🏾 Kiss - Man: Medium Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁮󠁧󠁦󠁣󠁿 Flag for Federal Capital Territory (NG-FC) 🏴󠁲󠁵󠁢󠁲󠁹󠁿 Flag for Bryansk (RU-BRY) 🏴󠁡󠁭󠁴󠁶󠁿 Flag for Tavush (AM-TV) 🏴󠁥󠁣󠁳󠁤󠁿 Flag for Santo Domingo de los Tsáchilas (EC-SD) 👩🏼‍❤️‍👩 Couple With Heart - Woman: Medium-Light Skin Tone, Woman 🏴󠁡󠁺󠁩󠁭󠁩󠁿 Flag for Imishli (AZ-IMI) 🏴󠁴󠁭󠁳󠁿 Flag for Aşgabat (TM-S) 👨‍❤️‍👩🏾 Couple With Heart - Man, Woman: Medium-Dark Skin Tone 🏴󠁬󠁡󠁸󠁥󠁿 Flag for Sekong (LA-XE) 🏴󠁲󠁯󠁧󠁪󠁿 Flag for Gorj (RO-GJ) 👨🏻‍❤️‍👨 Couple With Heart - Man: Light Skin Tone, Man 🏴󠁡󠁺󠁫󠁵󠁲󠁿 Flag for Kurdamir (AZ-KUR) 👩🏻‍👨🏻‍👦🏻‍👧🏻 Family - Woman: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁡󠁺󠁫󠁡󠁬󠁿 Flag for Kalbajar (AZ-KAL) 🏴󠁡󠁺󠁧󠁡󠁤󠁿 Flag for Gadabay (AZ-GAD) 🏴󠁡󠁺󠁬󠁡󠁣󠁿 Flag for Lachin (AZ-LAC) 🏴󠁡󠁺󠁬󠁡󠁿 Flag for Lankaran (AZ-LA) 🏴󠁶󠁮󠁳󠁧󠁿 Flag for Ho Chi Minh City (VN-SG) 🏴󠁡󠁺󠁬󠁥󠁲󠁿 Flag for Lerik (AZ-LER) 🏴󠁡󠁺󠁭󠁩󠁿 Flag for Mingachevir (AZ-MI) 👩🏾‍👨🏾‍👧🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: 
Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁡󠁺󠁮󠁡󠁿 Flag for Naftalan (AZ-NA) 🏴󠁡󠁺󠁭󠁡󠁳󠁿 Flag for Masally (AZ-MAS) 👨‍❤️‍👩 Couple With Heart - Man, Woman 🏴󠁡󠁺󠁬󠁡󠁮󠁿 Flag for Lankaran District (AZ-LAN) 👩🏼‍👨🏼‍👧🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👩🏽‍❤️‍💋‍👨🏾 Kiss - Woman: Medium Skin Tone, Man: Medium-Dark Skin Tone 👩🏿‍👧🏿‍👶🏿 Family - Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁡󠁺󠁮󠁥󠁦󠁿 Flag for Neftchala (AZ-NEF) 🏴󠁡󠁺󠁮󠁸󠁿 Flag for Nakhchivan AR (AZ-NX) 🏴󠁳󠁩󠀰󠀱󠀱󠁿 Flag for Celje (SI-011) 🏴󠁬󠁴󠀳󠀲󠁿 Flag for Panevėžio Municipality (LT-32) 👩🏿‍❤️‍💋‍👩🏽 Kiss - Woman: Dark Skin Tone, Woman: Medium Skin Tone 👨🏻‍❤️‍👩🏿 Couple With Heart - Man: Light Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁺󠁩󠁳󠁭󠁿 Flag for Ismailli (AZ-ISM) 󠁨 Tag Latin Small Letter H 👩🏾‍❤️‍👨🏻 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Light Skin Tone 👩🏻‍👶🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone 🏴󠁣󠁦󠁮󠁭󠁿 Flag for Nana-Mambéré (CF-NM) 🏴󠁡󠁺󠁱󠁯󠁢󠁿 Flag for Gobustan (AZ-QOB) 👩🏿‍❤️‍💋‍👨🏻 Kiss - Woman: Dark Skin Tone, Man: Light Skin Tone 👩🏿‍❤️‍💋‍👩🏿 Kiss - Woman: Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁺󠁱󠁢󠁩󠁿 Flag for Qubadli (AZ-QBI) 🏴󠁡󠁺󠁱󠁡󠁺󠁿 Flag for Qazakh (AZ-QAZ) 🏴󠁲󠁯󠁢󠁶󠁿 Flag for Braşov (RO-BV) 👨‍👩‍👧‍👶 Family: Man, Woman, Girl, Baby 🏴󠁡󠁺󠁱󠁢󠁡󠁿 Flag for Quba (AZ-QBA) 🏴󠁡󠁺󠁱󠁡󠁢󠁿 Flag for Qabala (AZ-QAB) 🏴󠁣󠁨󠁵󠁲󠁿 Flag for Uri (CH-UR) 🏴󠁡󠁺󠁯󠁧󠁵󠁿 Flag for Oghuz (AZ-OGU) 🏴󠁡󠁺󠁱󠁡󠁸󠁿 Flag for Qakh (AZ-QAX) 🏴󠁳󠁩󠀲󠀰󠀶󠁿 Flag for Šmarješke Toplice (SI-206) 👨🏾‍❤️‍💋‍👩🏿 Kiss - Man: Medium-Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁡󠁧󠀰󠀷󠁿 Flag for Saint Peter (AG-07) 👨🏻‍👩🏻‍👧🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 🏴󠁬󠁲󠁭󠁹󠁿 Flag for Maryland (LR-MY) 🏴󠁡󠁵󠁳󠁡󠁿 Flag for South Australia (AU-SA) 🏴󠁡󠁺󠁱󠁵󠁳󠁿 Flag for Qusar (AZ-QUS) 🏴󠁡󠁺󠁳󠁡󠁢󠁿 Flag for Sabirabad (AZ-SAB) 👨‍❤️‍👩🏽 Couple With Heart - Man, Woman: Medium Skin Tone 👨‍❤️‍👩🏼 Couple With Heart - Man, Woman: Medium-Light Skin Tone 
🏴󠁡󠁺󠁳󠁡󠁴󠁿 Flag for Saatly (AZ-SAT) 🏴󠁡󠁺󠁳󠁢󠁮󠁿 Flag for Shabran (AZ-SBN) 👨🏼‍❤️‍👩🏽 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium Skin Tone 🏴󠁡󠁺󠁳󠁡󠁫󠁿 Flag for Shaki District (AZ-SAK) 🏴󠁣󠁯󠁣󠁡󠁳󠁿 Flag for Casanare (CO-CAS) 👨‍👩‍👶‍👶 Family: Man, Woman, Baby, Baby 🏴󠁡󠁺󠁳󠁲󠁿 Flag for Shirvan (AZ-SR) 🏴󠁡󠁺󠁳󠁵󠁳󠁿 Flag for Shusha (AZ-SUS) 🏴󠁣󠁨󠁶󠁳󠁿 Flag for Valais (CH-VS) 👩🏽‍👶🏽 Family - Woman: Medium Skin Tone, Baby: Medium Skin Tone 👩🏻‍❤️‍💋‍👨🏿 Kiss - Woman: Light Skin Tone, Man: Dark Skin Tone 🏴󠁡󠁺󠁳󠁡󠁿 Flag for Shaki (AZ-SA) 🏴󠁦󠁲󠁭󠁱󠁿 Flag for Martinique (FR-MQ) 🏴󠁡󠁺󠁳󠁭󠁿 Flag for Sumqayit (AZ-SM) 🏴󠁡󠁺󠁳󠁩󠁹󠁿 Flag for Siazan (AZ-SIY) 🏴󠁡󠁺󠁳󠁭󠁩󠁿 Flag for Shamakhi (AZ-SMI) 👩🏿‍❤️‍💋‍👨 Kiss - Woman: Dark Skin Tone, Man 🏴󠁡󠁺󠁳󠁭󠁸󠁿 Flag for Samukh (AZ-SMX) 👨🏻‍👩🏻‍👧🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 🏴󠁡󠁺󠁴󠁯󠁶󠁿 Flag for Tovuz (AZ-TOV) 🏴󠁡󠁺󠁸󠁡󠁣󠁿 Flag for Khachmaz (AZ-XAC) 🏴󠁡󠁺󠁵󠁣󠁡󠁿 Flag for Ujar (AZ-UCA) 🏴󠁡󠁺󠁴󠁡󠁲󠁿 Flag for Tartar (AZ-TAR) 👨🏿‍❤️‍💋‍👨🏻 Kiss - Man: Dark Skin Tone, Man: Light Skin Tone 👩🏼‍👧🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👩🏽‍👧🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁡󠁺󠁸󠁩󠁺󠁿 Flag for Khizi (AZ-XIZ) 👨🏽‍❤️‍👨🏼 Couple With Heart - Man: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁡󠁺󠁸󠁣󠁩󠁿 Flag for Khojali (AZ-XCI) 🏴󠁶󠁥󠁹󠁿 Flag for Delta Amacuro (VE-Y) 🏴󠁡󠁺󠁸󠁡󠁿 Flag for Stepanakert (AZ-XA) 🏴󠁡󠁺󠁹󠁡󠁲󠁿 Flag for Yardymli (AZ-YAR) 🏴󠁡󠁺󠁹󠁥󠁶󠁿 Flag for Yevlakh District (AZ-YEV) 🏴󠁡󠁺󠁺󠁡󠁱󠁿 Flag for Zaqatala (AZ-ZAQ) 👩🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁡󠁺󠁹󠁥󠁿 Flag for Yevlakh (AZ-YE) 🏴󠁢󠁡󠁢󠁩󠁨󠁿 Flag for Federation of Bosnia and Herzegovina (BA-BIH) 🏴󠁡󠁺󠁺󠁡󠁲󠁿 Flag for Zardab (AZ-ZAR) 🏴󠁡󠁺󠁳󠁡󠁬󠁿 Flag for Salyan (AZ-SAL) 🏴󠁣󠁨󠁺󠁧󠁿 Flag for Zug (CH-ZG) 👨🏾‍👩🏾‍👧🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 👨🏿‍👩🏿‍👧🏿‍👶🏿 
Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 👩🏿‍👶🏿 Family - Woman: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁢󠁡󠁳󠁲󠁰󠁿 Flag for Republika Srpska (BA-SRP) 👨🏽‍❤️‍👩 Couple With Heart - Man: Medium Skin Tone, Woman 👨🏻‍👩🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone 🏴󠁥󠁳󠁡󠁮󠁿 Flag for Andalusia (ES-AN) 👨🏼‍👩🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁢󠁢󠀰󠀴󠁿 Flag for Saint James (BB-04) 👨🏾‍❤️‍👩🏼 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁢󠁢󠀰󠀳󠁿 Flag for Saint George (BB-03) 🏴󠁢󠁢󠀰󠀲󠁿 Flag for Saint Andrew (BB-02) 👨‍👩‍👶‍👦 Family: Man, Woman, Baby, Boy 👨🏽‍👩🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁢󠁢󠀰󠀵󠁿 Flag for Saint John (BB-05) 👨🏾‍👩🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁢󠁢󠀰󠀶󠁿 Flag for Saint Joseph (BB-06) 🏴󠁬󠁫󠀱󠁿 Flag for Western (LK-1) 🏴󠁢󠁹󠁢󠁲󠁿 Flag for Brest (BY-BR) 🏴󠁡󠁺󠁳󠁫󠁲󠁿 Flag for Shamkir (AZ-SKR) 🏴󠁢󠁢󠀰󠀷󠁿 Flag for Saint Lucy (BB-07) 👩🏻‍👶🏻‍👦🏻 Family - Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 🏴󠁥󠁳󠁣󠁭󠁿 Flag for Castile-La Mancha (ES-CM) 🏴󠁢󠁢󠀱󠀰󠁿 Flag for Saint Philip (BB-10) 🏴󠁶󠁣󠀰󠀴󠁿 Flag for Saint George (VC-04) 👨🏻‍👩🏻‍👶🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 👩🏻‍👧🏻‍👧🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁤󠁡󠁿 Flag for Barisal (BD-A) 🏴󠁡󠁺󠁺󠁡󠁮󠁿 Flag for Zangilan (AZ-ZAN) 🏴󠁪󠁭󠀰󠀱󠁿 Flag for Kingston (JM-01) 👨🏼‍👩🏼‍👶🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁢󠁤󠁥󠁿 Flag for Rajshahi Division (BD-E) 🏴󠁢󠁤󠁦󠁿 Flag for Rangpur Division (BD-F) 🏴󠁢󠁤󠁣󠁿 Flag for Dhaka Division (BD-C) 🏴󠁢󠁤󠁤󠁿 Flag for Khulna Division (BD-D) 🏴󠁢󠁢󠀰󠀹󠁿 Flag for Saint Peter (BB-09) 🏴󠁳󠁩󠀰󠀵󠀸󠁿 Flag for Lenart (SI-058) 👩🏼‍👶🏼 Family - Woman: 
Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁢󠁦󠀰󠀲󠁿 Flag for Cascades (BF-02) 🏴󠁢󠁤󠁨󠁿 Flag for Mymensingh Division (BD-H) 🏴󠁢󠁥󠁷󠁡󠁬󠁿 Flag for Wallonia (BE-WAL) 🏴󠁭󠁵󠁢󠁲󠁿 Flag for Beau-Bassin Rose-Hill (MU-BR) 🏴󠁢󠁦󠀰󠀴󠁿 Flag for Centre-Est (BF-04) 🏴󠁣󠁮󠀹󠀱󠁿 Flag for Hong Kong SAR China (CN-91) 🏴󠁢󠁦󠀰󠀱󠁿 Flag for Boucle du Mouhoun (BF-01) 🏴󠁢󠁦󠀰󠀳󠁿 Flag for Centre (BF-03) 🏴󠁤󠁫󠀸󠀲󠁿 Flag for Central Denmark (DK-82) 🏴󠁢󠁦󠀰󠀷󠁿 Flag for Centre-Sud (BF-07) 👨🏽‍👩🏽‍👶🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁢󠁦󠀰󠀶󠁿 Flag for Centre-Ouest (BF-06) 🏴󠁢󠁦󠀰󠀵󠁿 Flag for Centre-Nord (BF-05) 🏴󠁢󠁢󠀰󠀸󠁿 Flag for Saint Michael (BB-08) 🏴󠁢󠁢󠀱󠀱󠁿 Flag for Saint Thomas (BB-11) 👨🏽‍❤️‍👩🏿 Couple With Heart - Man: Medium Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁦󠀰󠀸󠁿 Flag for Est (BF-08) 🏴󠁢󠁥󠁢󠁲󠁵󠁿 Flag for Brussels (BE-BRU) 🏴󠁢󠁤󠁧󠁿 Flag for Sylhet Division (BD-G) 🏴󠁢󠁦󠀱󠀱󠁿 Flag for Plateau-Central (BF-11) 🏴󠁢󠁤󠁢󠁿 Flag for Chittagong Division (BD-B) 🏴󠁢󠁦󠀱󠀳󠁿 Flag for Sud-Ouest (BF-13) 👨🏾‍👩🏾‍👶🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁢󠁧󠀰󠀵󠁿 Flag for Vidin (BG-05) 🏴󠁢󠁧󠀰󠀳󠁿 Flag for Varna (BG-03) 👨🏿‍❤️‍👩🏽 Couple With Heart - Man: Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁧󠀰󠀲󠁿 Flag for Burgas (BG-02) 🏴󠁢󠁦󠀱󠀰󠁿 Flag for Nord (BF-10) 🏴󠁢󠁧󠀰󠀴󠁿 Flag for Veliko Tarnovo (BG-04) 👨🏽‍👩🏽‍👧🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁧󠀰󠀷󠁿 Flag for Gabrovo (BG-07) 👨🏿‍👩🏿‍👶🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁧󠀰󠀸󠁿 Flag for Dobrich (BG-08) 🏴󠁢󠁦󠀱󠀲󠁿 Flag for Sahel (BF-12) 🏴󠁡󠁵󠁴󠁡󠁳󠁿 Flag for Tasmania (AU-TAS) 👨🏿‍❤️‍👩🏻 Couple With Heart - Man: Dark Skin Tone, Woman: Light Skin Tone 👩🏻‍👧🏻‍👦🏻 Family - Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 👨🏻‍👩🏻‍👶🏻‍👧🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👨🏼‍👩🏼‍👶🏼‍👧🏼 Family 
- Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏾‍❤️‍💋‍👩🏾 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁡󠁺󠁸󠁶󠁤󠁿 Flag for Khojavend (AZ-XVD) 🏴󠁢󠁧󠀱󠀱󠁿 Flag for Lovech (BG-11) 🏴󠁣󠁬󠁬󠁩󠁿 Flag for Libertador General Bernardo O’Higgins (CL-LI) 🏴󠁢󠁧󠀱󠀳󠁿 Flag for Pazardzhik (BG-13) 👨🏿‍❤️‍👩🏿 Couple With Heart - Man: Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁧󠀱󠀴󠁿 Flag for Pernik (BG-14) 🏴󠁢󠁧󠀱󠀰󠁿 Flag for Kyustendil (BG-10) 🏴󠁥󠁧󠁢󠁡󠁿 Flag for Red Sea (EG-BA) 🏴󠁴󠁺󠀱󠀱󠁿 Flag for Zanzibar Central/South (TZ-11) 👨🏿‍👩🏿‍👧🏿‍👦🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁧󠀱󠀵󠁿 Flag for Pleven (BG-15) 👨🏿‍👨🏿‍👦🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 👨🏽‍👩🏽‍👶🏽‍👧🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁧󠀲󠀱󠁿 Flag for Smolyan (BG-21) 🏴󠁢󠁧󠀰󠀱󠁿 Flag for Blagoevgrad (BG-01) 🏴󠁤󠁺󠀳󠀴󠁿 Flag for Bordj Bou Arréridj (DZ-34) 🏴󠁢󠁧󠀱󠀶󠁿 Flag for Plovdiv (BG-16) 🏴󠁣󠁩󠁶󠁢󠁿 Flag for Vallée du Bandama (CI-VB) 🏴󠁢󠁧󠀱󠀹󠁿 Flag for Silistra (BG-19) 👩‍❤️‍👨🏼 Couple With Heart - Woman, Man: Medium-Light Skin Tone 🏴󠁢󠁧󠀱󠀷󠁿 Flag for Razgrad (BG-17) 👨🏾‍❤️‍👨 Couple With Heart - Man: Medium-Dark Skin Tone, Man 🏴󠁡󠁯󠁣󠁮󠁮󠁿 Flag for Cunene (AO-CNN) 🏴󠁢󠁧󠀲󠀰󠁿 Flag for Sliven (BG-20) 🧕🏻‍♀️ Woman With Headscarf: Light Skin Tone 🏴󠁢󠁧󠀲󠀵󠁿 Flag for Targovishte (BG-25) 👩🏼‍👩🏼‍👶🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏾‍👩🏾‍👶🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁢󠁧󠀲󠀳󠁿 Flag for Sofia District (BG-23) 🏴󠁢󠁧󠀲󠀲󠁿 Flag for Sofia (BG-22) 👨🏿‍👩🏿‍👧🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 👨🏻‍❤️‍💋‍👩🏾 Kiss - Man: Light Skin Tone, Woman: Medium-Dark Skin Tone 🧕🏽‍♀️ Woman With 
Headscarf: Medium Skin Tone 🏴󠁢󠁧󠀲󠀸󠁿 Flag for Yambol (BG-28) 🏴󠁢󠁨󠀱󠀳󠁿 Flag for Capital (BH-13) 🏴󠁢󠁧󠀲󠀶󠁿 Flag for Haskovo (BG-26) 🏴󠁬󠁩󠀰󠀷󠁿 Flag for Schaan (LI-07) 👨🏿‍👩🏿‍👶🏿‍👧🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁨󠀱󠀵󠁿 Flag for Muharraq (BH-15) 🏴󠁢󠁨󠀱󠀴󠁿 Flag for Southern (BH-14) 🧕🏾‍♀️ Woman With Headscarf: Medium-Dark Skin Tone 🏴󠁲󠁯󠁳󠁢󠁿 Flag for Sibiu (RO-SB) 🧕🏼‍♀️ Woman With Headscarf: Medium-Light Skin Tone 👩🏻‍❤️‍👨🏿 Couple With Heart - Woman: Light Skin Tone, Man: Dark Skin Tone 🏴󠁢󠁨󠀱󠀷󠁿 Flag for Northern (BH-17) 🏴󠁢󠁩󠁢󠁢󠁿 Flag for Bubanza (BI-BB) 👩🏻‍❤️‍👩 Couple With Heart - Woman: Light Skin Tone, Woman 🏴󠁢󠁥󠁶󠁬󠁧󠁿 Flag for Flanders (BE-VLG) 👩🏽‍👧🏽 Family - Woman: Medium Skin Tone, Girl: Medium Skin Tone 👨🏻‍👩🏻‍👶🏻‍👶🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 🏴󠁢󠁩󠁢󠁭󠁿 Flag for Bujumbura (BI-BM) 🧕🏿‍♀️ Woman With Headscarf: Dark Skin Tone 🏴󠁢󠁩󠁢󠁬󠁿 Flag for Bujumbura Rural (BI-BL) 👨🏾‍❤️‍💋‍👩🏽 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium Skin Tone 👨🏼‍👩🏼‍👶🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏻‍👨🏻‍👦🏻‍👶🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 🏴󠁢󠁩󠁣󠁡󠁿 Flag for Cankuzo (BI-CA) 🏴󠁢󠁧󠀱󠀲󠁿 Flag for Montana (BG-12) 🏴󠁬󠁶󠀰󠀸󠀵󠁿 Flag for Sala (LV-085) ⃣ Combining Enclosing Keycap 🏴󠁢󠁩󠁢󠁲󠁿 Flag for Bururi (BI-BR) 🏴󠁢󠁧󠀰󠀹󠁿 Flag for Kardzhali (BG-09) 🏴󠁢󠁩󠁲󠁭󠁿 Flag for Rumonge (BI-RM) 🏴󠁮󠁬󠁡󠁷󠁿 Flag for Aruba (NL-AW) 🏴󠁢󠁩󠁭󠁹󠁿 Flag for Muyinga (BI-MY) 🏴󠁢󠁩󠁲󠁴󠁿 Flag for Rutana (BI-RT) 🏴󠁢󠁩󠁲󠁹󠁿 Flag for Ruyigi (BI-RY) 🏴󠁢󠁩󠁫󠁩󠁿 Flag for Kirundo (BI-KI) 🏴󠁢󠁩󠁫󠁹󠁿 Flag for Kayanza (BI-KY) 🏴󠁢󠁩󠁭󠁷󠁿 Flag for Mwaro (BI-MW) 🏴󠁢󠁧󠀲󠀷󠁿 Flag for Shumen (BG-27) 🏴󠁢󠁩󠁮󠁧󠁿 Flag for Ngozi (BI-NG) 🏴󠁢󠁩󠁫󠁲󠁿 Flag for Karuzi (BI-KR) 🏴󠁢󠁩󠁭󠁵󠁿 Flag for Muramvya (BI-MU) 🏴󠁭󠁡󠀱󠀵󠁿 Flag for Laâyoune-Boujdour-Sakia El Hamra (MA-15) 👨🏽‍👩🏽‍👶🏽‍👶🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Baby: 
Medium Skin Tone, Baby: Medium Skin Tone 👩🏾‍👨🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏾‍👩🏾‍👶🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁢󠁪󠁤󠁯󠁿 Flag for Donga (BJ-DO) 👩🏽‍👨🏽‍👶🏽‍👦🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 👨🏽‍❤️‍💋‍👩🏼 Kiss - Man: Medium Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁦󠁲󠁨󠁤󠁦󠁿 Flag for Hauts-de-France (FR-HDF) 🏴󠁢󠁪󠁡󠁬󠁿 Flag for Alibori (BJ-AL) 🏴󠁢󠁪󠁡󠁫󠁿 Flag for Atakora (BJ-AK) 👨🏿‍👩🏿‍👶🏿‍👶🏿 Family - Man: Dark Skin Tone, Woman: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁢󠁪󠁬󠁩󠁿 Flag for Littoral (BJ-LI) 🏴󠁢󠁪󠁢󠁯󠁿 Flag for Borgou (BJ-BO) 👩‍👩‍👧‍👶 Family: Woman, Woman, Girl, Baby 🏴󠁵󠁳󠁮󠁤󠁿 Flag for North Dakota (US-ND) 👨🏼‍❤️‍💋‍👨🏾 Kiss - Man: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁢󠁪󠁫󠁯󠁿 Flag for Kouffo (BJ-KO) 🏴󠁢󠁪󠁰󠁬󠁿 Flag for Plateau (BJ-PL) 🏴󠁧󠁤󠀱󠀰󠁿 Flag for Carriacou and Petite Martinique (GD-10) 🏴󠁢󠁪󠁺󠁯󠁿 Flag for Zou (BJ-ZO) 👩🏼‍❤️‍👨🏻 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Light Skin Tone 👩🏽‍❤️‍👨🏽 Couple With Heart - Woman: Medium Skin Tone, Man: Medium Skin Tone 👨🏽‍❤️‍👩🏼 Couple With Heart - Man: Medium Skin Tone, Woman: Medium-Light Skin Tone 👩🏽‍❤️‍👨🏻 Couple With Heart - Woman: Medium Skin Tone, Man: Light Skin Tone 🏴󠁬󠁢󠁢󠁩󠁿 Flag for Beqaa (LB-BI) 🏴󠁢󠁮󠁴󠁥󠁿 Flag for Temburong (BN-TE) 👩🏻‍👦🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁮󠁴󠁵󠁿 Flag for Tutong (BN-TU) 🏴󠁢󠁮󠁢󠁭󠁿 Flag for Brunei-Muara (BN-BM) 👨🏻‍👩🏻‍👦🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁧󠀰󠀶󠁿 Flag for Vratsa (BG-06) 👩🏽‍❤️‍👨🏼 Couple With Heart - Woman: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁯󠁢󠁿 Flag for Beni (BO-B) 🏴󠁢󠁮󠁢󠁥󠁿 Flag for Belait (BN-BE) 👩🏼‍❤️‍👨 Couple With Heart - Woman: Medium-Light Skin Tone, Man 🏴󠁢󠁪󠁯󠁵󠁿 Flag for Ouémé (BJ-OU) 👩🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, 
Boy: Medium-Light Skin Tone 🏴󠁳󠁣󠀲󠀵󠁿 Flag for Roche Caiman (SC-25) 👩🏻‍❤️‍👨🏾 Couple With Heart - Woman: Light Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁢󠁯󠁣󠁿 Flag for Cochabamba (BO-C) 👨🏾‍👩🏾‍👧🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁢󠁯󠁮󠁿 Flag for Pando (BO-N) 👩🏽‍❤️‍👩🏻 Couple With Heart - Woman: Medium Skin Tone, Woman: Light Skin Tone 👩🏾‍❤️‍👨🏽 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁯󠁨󠁿 Flag for Chuquisaca (BO-H) 🏴󠁢󠁯󠁬󠁿 Flag for La Paz (BO-L) 🏴󠁭󠁮󠀰󠀳󠀹󠁿 Flag for Khentii (MN-039) 🕴🏽‍♀️ Woman in Business Suit Levitating: Medium Skin Tone 🏴󠁭󠁫󠀲󠀷󠁿 Flag for Dolneni (MK-27) 🏴󠁢󠁧󠀲󠀴󠁿 Flag for Stara Zagora (BG-24) 👩🏽‍👦🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁩󠁲󠀱󠀳󠁿 Flag for Sistan and Baluchestan (IR-13) 👩🏾‍❤️‍👨🏼 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁯󠁰󠁿 Flag for Potosí (BO-P) 🏴󠁢󠁱󠁢󠁯󠁿 Flag for Bonaire (BQ-BO) 👩‍❤️‍💋‍👨🏻 Kiss - Woman, Man: Light Skin Tone 👩🏾‍❤️‍👨 Couple With Heart - Woman: Medium-Dark Skin Tone, Man 👩🏼‍👦🏼‍👦🏼 Family - Woman: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁢󠁡󠁢󠁲󠁣󠁿 Flag for Brčko District (BA-BRC) 🏴󠁢󠁱󠁳󠁡󠁿 Flag for Saba (BQ-SA) 👩🏽‍❤️‍👨🏾 Couple With Heart - Woman: Medium Skin Tone, Man: Medium-Dark Skin Tone 👩🏾‍❤️‍👨🏿 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Dark Skin Tone 🏴󠁢󠁲󠁡󠁣󠁿 Flag for Acre (BR-AC) 🏴󠁢󠁩󠁧󠁩󠁿 Flag for Gitega (BI-GI) 👩🏿‍👦🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone 👩🏿‍❤️‍👨🏻 Couple With Heart - Woman: Dark Skin Tone, Man: Light Skin Tone 🏴󠁢󠁲󠁡󠁭󠁿 Flag for Amazonas (BR-AM) 🏴󠁡󠁲󠁣󠁿 Flag for Buenos Aires (AR-C) 👨🏼‍👩🏼‍👧🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏼‍❤️‍💋‍👨🏼 Kiss - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁲󠁥󠁳󠁿 Flag for Espírito Santo (BR-ES) 👨🏿‍❤️‍💋‍👨🏾 Kiss - Man: Dark Skin Tone, Man: Medium-Dark Skin 
Tone 👨🏼‍❤️‍💋‍👨🏽 Kiss - Man: Medium-Light Skin Tone, Man: Medium Skin Tone 👩🏾‍👦🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨🏻‍❤️‍👩 Couple With Heart - Man: Light Skin Tone, Woman 👨🏿‍❤️‍💋‍👩🏾 Kiss - Man: Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩🏻‍❤️‍💋‍👩🏽 Kiss - Woman: Light Skin Tone, Woman: Medium Skin Tone 👨🏼‍❤️‍💋‍👨🏿 Kiss - Man: Medium-Light Skin Tone, Man: Dark Skin Tone 👩🏽‍👦🏽‍👦🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏿‍❤️‍👩🏼 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁢󠁲󠁭󠁡󠁿 Flag for Maranhão (BR-MA) 👩🏿‍❤️‍👩🏽 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium Skin Tone 👩🏿‍❤️‍👩 Couple With Heart - Woman: Dark Skin Tone, Woman 🏴󠁢󠁲󠁡󠁰󠁿 Flag for Amapá (BR-AP) 👨🏽‍❤️‍👨🏻 Couple With Heart - Man: Medium Skin Tone, Man: Light Skin Tone 👩🏻‍❤️‍💋‍👨🏻 Kiss - Woman: Light Skin Tone, Man: Light Skin Tone 👨🏽‍❤️‍💋‍👨🏽 Kiss - Man: Medium Skin Tone, Man: Medium Skin Tone 👩🏿‍❤️‍💋‍👩🏻 Kiss - Woman: Dark Skin Tone, Woman: Light Skin Tone 👨🏽‍❤️‍💋‍👩🏿 Kiss - Man: Medium Skin Tone, Woman: Dark Skin Tone 👩🏼‍❤️‍💋‍👨🏾 Kiss - Woman: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏿‍❤️‍💋‍👨🏼 Kiss - Man: Dark Skin Tone, Man: Medium-Light Skin Tone 👨🏾‍❤️‍💋‍👨🏿 Kiss - Man: Medium-Dark Skin Tone, Man: Dark Skin Tone 👩🏽‍❤️‍💋‍👩🏿 Kiss - Woman: Medium Skin Tone, Woman: Dark Skin Tone 👩🏼‍❤️‍💋‍👨🏿 Kiss - Woman: Medium-Light Skin Tone, Man: Dark Skin Tone 👨🏽‍❤️‍💋‍👩🏽 Kiss - Man: Medium Skin Tone, Woman: Medium Skin Tone 👨🏾‍❤️‍💋‍👨🏼 Kiss - Man: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 👨🏽‍❤️‍💋‍👩🏻 Kiss - Man: Medium Skin Tone, Woman: Light Skin Tone 👨🏾‍❤️‍💋‍👨 Kiss - Man: Medium-Dark Skin Tone, Man 👨🏾‍❤️‍💋‍👨🏾 Kiss - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👩‍❤️‍💋‍👨🏾 Kiss - Woman, Man: Medium-Dark Skin Tone 👩‍❤️‍💋‍👩🏻 Kiss - Woman, Woman: Light Skin Tone 👩🏽‍❤️‍💋‍👨🏻 Kiss - Woman: Medium Skin Tone, Man: Light Skin Tone 👩🏿‍❤️‍💋‍👨🏿 Kiss - Woman: Dark Skin Tone, 
Man: Dark Skin Tone 👩🏻‍❤️‍💋‍👩🏿 Kiss - Woman: Light Skin Tone, Woman: Dark Skin Tone 👩🏻‍❤️‍💋‍👩🏼 Kiss - Woman: Light Skin Tone, Woman: Medium-Light Skin Tone 👩🏾‍❤️‍💋‍👩🏿 Kiss - Woman: Medium-Dark Skin Tone, Woman: Dark Skin Tone 👩🏾‍❤️‍💋‍👩 Kiss - Woman: Medium-Dark Skin Tone, Woman 👩🏾‍❤️‍💋‍👩🏻 Kiss - Woman: Medium-Dark Skin Tone, Woman: Light Skin Tone 👩🏻‍❤️‍👨 Couple With Heart - Woman: Light Skin Tone, Man 👩🏻‍👩🏻‍👦🏻 Family - Woman: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone 👩🏾‍❤️‍💋‍👨🏾 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👨🏻‍❤️‍👨🏽 Couple With Heart - Man: Light Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁲󠁭󠁴󠁿 Flag for Mato Grosso (BR-MT) 👨🏽‍❤️‍👩🏻 Couple With Heart - Man: Medium Skin Tone, Woman: Light Skin Tone 👨‍❤️‍👨🏿 Couple With Heart - Man, Man: Dark Skin Tone 👩🏿‍❤️‍💋‍👨🏼 Kiss - Woman: Dark Skin Tone, Man: Medium-Light Skin Tone 👩🏿‍❤️‍💋‍👩🏾 Kiss - Woman: Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩🏻‍👦🏻‍👧🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁯󠁳󠁿 Flag for Santa Cruz (BO-S) 👨🏻‍❤️‍👩🏽 Couple With Heart - Man: Light Skin Tone, Woman: Medium Skin Tone 👨🏽‍❤️‍👩🏽 Couple With Heart - Man: Medium Skin Tone, Woman: Medium Skin Tone 👩🏾‍❤️‍💋‍👩🏽 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁪󠁣󠁯󠁿 Flag for Collines (BJ-CO) 👨🏻‍👩🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Boy: Light Skin Tone 👨‍❤️‍👨🏽 Couple With Heart - Man, Man: Medium Skin Tone 👨🏾‍👩🏾‍👦🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏼‍❤️‍👨 Couple With Heart - Man: Medium-Light Skin Tone, Man 👨🏾‍❤️‍👩🏽 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁲󠁰󠁡󠁿 Flag for Pará (BR-PA) 👩🏽‍👦🏽‍👧🏽 Family - Woman: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👨🏼‍❤️‍👨🏼 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 👨🏿‍❤️‍👨🏻 Couple With Heart - Man: Dark Skin Tone, Man: 
Light Skin Tone 👩🏽‍❤️‍👩🏽 Couple With Heart - Woman: Medium Skin Tone, Woman: Medium Skin Tone 👨🏾‍❤️‍👨🏽 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium Skin Tone 👨🏽‍❤️‍👨🏽 Couple With Heart - Man: Medium Skin Tone, Man: Medium Skin Tone 👨🏻‍❤️‍👩🏼 Couple With Heart - Man: Light Skin Tone, Woman: Medium-Light Skin Tone 👨🏾‍❤️‍👩🏿 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Dark Skin Tone 👨🏾‍❤️‍👨🏼 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 👩🏾‍❤️‍💋‍👩🏾 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩🏼‍❤️‍👩🏻 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Light Skin Tone 👨🏿‍❤️‍👩🏼 Couple With Heart - Man: Dark Skin Tone, Woman: Medium-Light Skin Tone 👨🏼‍❤️‍👨🏾 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏽‍❤️‍👨🏾 Couple With Heart - Man: Medium Skin Tone, Man: Medium-Dark Skin Tone 👩‍❤️‍👨🏾 Couple With Heart - Woman, Man: Medium-Dark Skin Tone 🏴󠁢󠁲󠁡󠁬󠁿 Flag for Alagoas (BR-AL) 👩‍❤️‍👨🏻 Couple With Heart - Woman, Man: Light Skin Tone 🏴󠁢󠁦󠀰󠀹󠁿 Flag for Hauts-Bassins (BF-09) 👨🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👩🏾‍❤️‍👩🏾 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁢󠁲󠁲󠁪󠁿 Flag for Rio de Janeiro (BR-RJ) 👨🏾‍❤️‍💋‍👩🏻 Kiss - Man: Medium-Dark Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁲󠁲󠁯󠁿 Flag for Rondônia (BR-RO) 👨🏾‍❤️‍👨🏿 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Dark Skin Tone 👨🏽‍👦🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone 👨🏼‍❤️‍👨🏽 Couple With Heart - Man: Medium-Light Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁲󠁰󠁩󠁿 Flag for Piauí (BR-PI) 👨🏽‍👩🏽‍👦🏽‍👦🏽 Family - Man: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁢󠁲󠁲󠁮󠁿 Flag for Rio Grande do Norte (BR-RN) 👩🏻‍❤️‍👨🏻 Couple With Heart - Woman: Light Skin Tone, Man: Light Skin Tone 👨🏻‍👦🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone 👩🏼‍❤️‍👩🏾 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: 
Medium-Dark Skin Tone 👨🏿‍❤️‍👩🏾 Couple With Heart - Man: Dark Skin Tone, Woman: Medium-Dark Skin Tone 🏴󠁢󠁲󠁳󠁥󠁿 Flag for Sergipe (BR-SE) 🏴󠁢󠁲󠁰󠁲󠁿 Flag for Paraná (BR-PR) 👨🏿‍👦🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone 👩🏼‍❤️‍👩🏽 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium Skin Tone 👩🏾‍❤️‍👩🏼 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁲󠁵󠁭󠁯󠁳󠁿 Flag for Moscow Province (RU-MOS) 👩🏽‍❤️‍💋‍👩🏽 Kiss - Woman: Medium Skin Tone, Woman: Medium Skin Tone 👩🏿‍👦🏿‍👦🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁲󠁳󠁰󠁿 Flag for São Paulo (BR-SP) 🏴󠁩󠁲󠀰󠀱󠁿 Flag for East Azerbaijan (IR-01) 🏴󠁢󠁲󠁲󠁳󠁿 Flag for Rio Grande do Sul (BR-RS) 👩🏼‍❤️‍👨🏿 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Dark Skin Tone 🏴󠁮󠁯󠀱󠀴󠁿 Flag for Sogn og Fjordane (NO-14) 🏴󠁢󠁲󠁴󠁯󠁿 Flag for Tocantins (BR-TO) 🏴󠁳󠁩󠀱󠀸󠀲󠁿 Flag for Sveti Andraž v Slovenskih Goricah (SI-182) 👨🏼‍❤️‍👩🏻 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Light Skin Tone 👨🏿‍❤️‍👨🏽 Couple With Heart - Man: Dark Skin Tone, Man: Medium Skin Tone 👨🏽‍👦🏽‍👦🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👨🏿‍👦🏿‍👦🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁳󠁢󠁩󠁿 Flag for Bimini (BS-BI) 👨🏿‍❤️‍👩 Couple With Heart - Man: Dark Skin Tone, Woman 👩🏻‍👦🏻‍👦🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁲󠁲󠁲󠁿 Flag for Roraima (BR-RR) 🏴󠁢󠁯󠁯󠁿 Flag for Oruro (BO-O) 🏴󠁢󠁳󠁥󠁸󠁿 Flag for Exuma (BS-EX) 👨🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👩🏽‍❤️‍👨 Couple With Heart - Woman: Medium Skin Tone, Man 🏴󠁢󠁳󠁣󠁥󠁿 Flag for Central Eleuthera (BS-CE) 🏴󠁢󠁳󠁢󠁹󠁿 Flag for Berry Islands (BS-BY) 🏴󠁢󠁩󠁭󠁡󠁿 Flag for Makamba (BI-MA) 🏴󠁢󠁲󠁤󠁦󠁿 Flag for Federal District (BR-DF) 👩🏻‍❤️‍👩🏾 Couple With Heart - Woman: Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏼‍❤️‍💋‍👩🏼 Kiss - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 🏴󠁢󠁳󠁣󠁯󠁿 Flag for Central Abaco (BS-CO) 🏴󠁢󠁳󠁥󠁧󠁿 Flag for East Grand 
Bahama (BS-EG) 🏴󠁢󠁳󠁣󠁳󠁿 Flag for Central Andros (BS-CS) 👨🏻‍👦🏻‍👧🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁢󠁳󠁣󠁫󠁿 Flag for Crooked Island (BS-CK) 🏴󠁢󠁳󠁢󠁰󠁿 Flag for Black Point (BS-BP) 👨🏼‍👦🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👦🏽‍👧🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 👩🏿‍❤️‍👨🏾 Couple With Heart - Woman: Dark Skin Tone, Man: Medium-Dark Skin Tone 👩🏾‍❤️‍💋‍👨🏼 Kiss - Woman: Medium-Dark Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁳󠁮󠁥󠁿 Flag for North Eleuthera (BS-NE) 🏴󠁢󠁳󠁮󠁯󠁿 Flag for North Abaco (BS-NO) 🏴󠁢󠁳󠁭󠁧󠁿 Flag for Mayaguana (BS-MG) 👨🏾‍👦🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏼‍❤️‍💋‍👩🏻 Kiss - Man: Medium-Light Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁳󠁧󠁣󠁿 Flag for Grand Cay (BS-GC) 🏴󠁢󠁳󠁦󠁰󠁿 Flag for Freeport (BS-FP) 🏴󠁢󠁳󠁩󠁮󠁿 Flag for Inagua (BS-IN) 🏴󠁢󠁳󠁨󠁴󠁿 Flag for Hope Town (BS-HT) 👩🏾‍❤️‍👩🏿 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁳󠁬󠁩󠁿 Flag for Long Island (BS-LI) 👨🏿‍👦🏿‍👧🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 👨🏾‍❤️‍👩 Couple With Heart - Man: Medium-Dark Skin Tone, Woman 👩🏿‍❤️‍👨🏿 Couple With Heart - Woman: Dark Skin Tone, Man: Dark Skin Tone 👨🏻‍👦🏻‍👶🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 👨‍👨‍👶 Family: Man, Man, Baby 👩‍👧‍👶 Family: Woman, Girl, Baby 👨‍👦‍👶 Family: Man, Boy, Baby 👨‍👨‍👶‍👦 Family: Man, Man, Baby, Boy 👨‍👦‍👧 Family: Man, Boy, Girl 👨‍👶‍👶 Family: Man, Baby, Baby 🏴󠁢󠁳󠁲󠁩󠁿 Flag for Ragged Island (BS-RI) 👩🏿‍❤️‍👩🏿 Couple With Heart - Woman: Dark Skin Tone, Woman: Dark Skin Tone 👩🏿‍❤️‍👨🏽 Couple With Heart - Woman: Dark Skin Tone, Man: Medium Skin Tone 👩🏼‍❤️‍👨🏼 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁳󠁮󠁳󠁿 Flag for North Andros (BS-NS) 👩🏿‍❤️‍👩🏻 Couple With Heart - Woman: Dark Skin Tone, Woman: Light Skin Tone 👨🏻‍❤️‍💋‍👨 Kiss - Man: Light Skin Tone, 
Man 🏴󠁢󠁳󠁳󠁡󠁿 Flag for South Andros (BS-SA) 👨🏻‍❤️‍💋‍👨🏼 Kiss - Man: Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁳󠁳󠁥󠁿 Flag for South Eleuthera (BS-SE) 👨🏼‍👦🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏻‍❤️‍💋‍👩🏻 Kiss - Man: Light Skin Tone, Woman: Light Skin Tone 👨🏼‍❤️‍💋‍👩🏾 Kiss - Man: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏾‍❤️‍💋‍👩🏼 Kiss - Man: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 👨🏾‍❤️‍💋‍👨🏻 Kiss - Man: Medium-Dark Skin Tone, Man: Light Skin Tone 🏴󠁢󠁲󠁳󠁣󠁿 Flag for Santa Catarina (BR-SC) 👩‍👩‍👦‍👧 Family: Woman, Woman, Boy, Girl 👨‍❤️‍💋‍👩🏾 Kiss - Man, Woman: Medium-Dark Skin Tone 🏴󠁢󠁳󠁲󠁣󠁿 Flag for Rum Cay (BS-RC) 👩‍👩‍👶‍👦 Family: Woman, Woman, Baby, Boy 👨🏻‍❤️‍💋‍👩🏽 Kiss - Man: Light Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁳󠁣󠁩󠁿 Flag for Cat Island (BS-CI) 👩🏽‍❤️‍👩 Couple With Heart - Woman: Medium Skin Tone, Woman 👨🏽‍👦🏽‍👶🏽 Family - Man: Medium Skin Tone, Boy: Medium Skin Tone, Baby: Medium Skin Tone 👩‍👨‍👦‍👶 Family: Woman, Man, Boy, Baby 👨🏾‍❤️‍💋‍👩 Kiss - Man: Medium-Dark Skin Tone, Woman 👨‍❤️‍💋‍👨🏻 Kiss - Man, Man: Light Skin Tone 👨🏻‍❤️‍💋‍👨🏿 Kiss - Man: Light Skin Tone, Man: Dark Skin Tone 👨🏼‍❤️‍💋‍👩🏽 Kiss - Man: Medium-Light Skin Tone, Woman: Medium Skin Tone 👨🏾‍👦🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁢󠁳󠁳󠁯󠁿 Flag for South Abaco (BS-SO) 👩🏾‍❤️‍💋‍👩🏼 Kiss - Woman: Medium-Dark Skin Tone, Woman: Medium-Light Skin Tone 👨🏻‍❤️‍👨🏿 Couple With Heart - Man: Light Skin Tone, Man: Dark Skin Tone 👨🏿‍❤️‍💋‍👨🏿 Kiss - Man: Dark Skin Tone, Man: Dark Skin Tone 👩🏾‍❤️‍💋‍👨🏿 Kiss - Woman: Medium-Dark Skin Tone, Man: Dark Skin Tone 👩🏼‍❤️‍💋‍👨🏽 Kiss - Woman: Medium-Light Skin Tone, Man: Medium Skin Tone 👩🏾‍❤️‍💋‍👨🏻 Kiss - Woman: Medium-Dark Skin Tone, Man: Light Skin Tone 👩🏽‍❤️‍💋‍👨 Kiss - Woman: Medium Skin Tone, Man 👨‍👧‍👶 Family: Man, Girl, Baby 👩🏻‍❤️‍💋‍👨🏾 Kiss - Woman: Light Skin Tone, Man: Medium-Dark Skin Tone 👨‍❤️‍👨🏼 Couple With Heart - Man, Man: Medium-Light 
Skin Tone 👩🏼‍❤️‍💋‍👩🏼 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 👨🏿‍❤️‍💋‍👩🏿 Kiss - Man: Dark Skin Tone, Woman: Dark Skin Tone 👨‍❤️‍💋‍👩🏼 Kiss - Man, Woman: Medium-Light Skin Tone 🏴󠁣󠁩󠁡󠁢󠁿 Flag for Abidjan (CI-AB) 👩🏻‍❤️‍💋‍👨 Kiss - Woman: Light Skin Tone, Man 👩🏼‍❤️‍💋‍👩🏾 Kiss - Woman: Medium-Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏻‍❤️‍💋‍👩🏼 Kiss - Man: Light Skin Tone, Woman: Medium-Light Skin Tone 👩🏽‍❤️‍💋‍👨🏿 Kiss - Woman: Medium Skin Tone, Man: Dark Skin Tone 👩🏿‍❤️‍💋‍👩🏼 Kiss - Woman: Dark Skin Tone, Woman: Medium-Light Skin Tone 👩🏿‍❤️‍💋‍👨🏾 Kiss - Woman: Dark Skin Tone, Man: Medium-Dark Skin Tone 👩🏼‍❤️‍💋‍👨 Kiss - Woman: Medium-Light Skin Tone, Man 👩‍❤️‍👩🏾 Couple With Heart - Woman, Woman: Medium-Dark Skin Tone 👨🏿‍❤️‍👨🏼 Couple With Heart - Man: Dark Skin Tone, Man: Medium-Light Skin Tone 👨🏿‍👦🏿‍👶🏿 Family - Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 👨🏼‍❤️‍👩🏼 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 👩🏼‍❤️‍👨🏽 Couple With Heart - Woman: Medium-Light Skin Tone, Man: Medium Skin Tone 🏴󠁢󠁳󠁳󠁷󠁿 Flag for Spanish Wells (BS-SW) 👨🏿‍❤️‍👨🏿 Couple With Heart - Man: Dark Skin Tone, Man: Dark Skin Tone 👨🏼‍❤️‍👨🏿 Couple With Heart - Man: Medium-Light Skin Tone, Man: Dark Skin Tone 👨🏼‍❤️‍👩 Couple With Heart - Man: Medium-Light Skin Tone, Woman 👩🏼‍❤️‍👩🏼 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone 👨🏼‍❤️‍👨🏻 Couple With Heart - Man: Medium-Light Skin Tone, Man: Light Skin Tone 👨🏾‍❤️‍👨🏾 Couple With Heart - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👩‍❤️‍👩🏼 Couple With Heart - Woman, Woman: Medium-Light Skin Tone 👨🏼‍❤️‍👩🏿 Couple With Heart - Man: Medium-Light Skin Tone, Woman: Dark Skin Tone 👨🏻‍❤️‍👨🏾 Couple With Heart - Man: Light Skin Tone, Man: Medium-Dark Skin Tone 👨🏽‍❤️‍👩🏾 Couple With Heart - Man: Medium Skin Tone, Woman: Medium-Dark Skin Tone 👩‍❤️‍👩🏿 Couple With Heart - Woman, Woman: Dark Skin Tone 👨🏽‍❤️‍👨🏿 Couple With Heart - Man: Medium Skin Tone, Man: Dark 
Skin Tone 👨‍👨‍👦‍👶 Family: Man, Man, Boy, Baby 👨🏿‍❤️‍👨 Couple With Heart - Man: Dark Skin Tone, Man 👩🏻‍❤️‍👩🏿 Couple With Heart - Woman: Light Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁳󠁳󠁳󠁿 Flag for San Salvador (BS-SS) 🏴󠁢󠁴󠀱󠀴󠁿 Flag for Samtse (BT-14) 👩🏻‍❤️‍👨🏽 Couple With Heart - Woman: Light Skin Tone, Man: Medium Skin Tone 👩🏼‍❤️‍👩🏿 Couple With Heart - Woman: Medium-Light Skin Tone, Woman: Dark Skin Tone 👨‍❤️‍👩🏿 Couple With Heart - Man, Woman: Dark Skin Tone 🏴󠁢󠁴󠀱󠀱󠁿 Flag for Paro (BT-11) 👨🏻‍❤️‍👩🏾 Couple With Heart - Man: Light Skin Tone, Woman: Medium-Dark Skin Tone 👨🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁢󠁴󠀱󠀵󠁿 Flag for Thimphu (BT-15) 👩🏾‍❤️‍👩🏽 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman: Medium Skin Tone 🏴󠁢󠁳󠁷󠁧󠁿 Flag for West Grand Bahama (BS-WG) 🏴󠁢󠁴󠀱󠀳󠁿 Flag for Haa (BT-13) 🏴󠁢󠁴󠀱󠀲󠁿 Flag for Chukha (BT-12) 👨🏻‍❤️‍💋‍👨🏽 Kiss - Man: Light Skin Tone, Man: Medium Skin Tone 👨🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 👨🏽‍👧🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁳󠁡󠁫󠁿 Flag for Acklins (BS-AK) 🏴󠁢󠁴󠀳󠀲󠁿 Flag for Trongsa (BT-32) 🏴󠁢󠁴󠀴󠀱󠁿 Flag for Trashigang (BT-41) 🏴󠁢󠁴󠀲󠀳󠁿 Flag for Punakha (BT-23) 🏴󠁢󠁴󠀲󠀴󠁿 Flag for Wangdue Phodrang (BT-24) 🏴󠁢󠁴󠀳󠀳󠁿 Flag for Bumthang (BT-33) 🏴󠁢󠁴󠀳󠀴󠁿 Flag for Zhemgang (BT-34) 👩🏼‍❤️‍💋‍👨🏼 Kiss - Woman: Medium-Light Skin Tone, Man: Medium-Light Skin Tone 🏴󠁢󠁴󠀴󠀲󠁿 Flag for Mongar (BT-42) 🏴󠁢󠁲󠁰󠁢󠁿 Flag for Paraíba (BR-PB) 👩🏿‍❤️‍👨🏼 Couple With Heart - Woman: Dark Skin Tone, Man: Medium-Light Skin Tone 🏴󠁣󠁨󠁺󠁨󠁿 Flag for Zürich (CH-ZH) 🏴󠁢󠁴󠀳󠀱󠁿 Flag for Sarpang (BT-31) 🏴󠁢󠁴󠀲󠀲󠁿 Flag for Dagana (BT-22) 👩🏻‍❤️‍💋‍👨🏽 Kiss - Woman: Light Skin Tone, Man: Medium Skin Tone 👨🏿‍👨🏿‍👧🏿‍👧🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁷󠁣󠁥󠁿 Flag for Central (BW-CE) 🏴󠁢󠁴󠁧󠁡󠁿 Flag for Gasa (BT-GA) 🏴󠁢󠁷󠁣󠁨󠁿 Flag for Chobe (BW-CH) 🏴󠁢󠁴󠀴󠀵󠁿 Flag for Samdrup Jongkhar (BT-45) 🏴󠁢󠁷󠁦󠁲󠁿 Flag for Francistown (BW-FR) 🏴󠁢󠁴󠀴󠀴󠁿 Flag for Lhuntse (BT-44) 🏴󠁢󠁴󠁴󠁹󠁿 Flag for Trashiyangtse 
(BT-TY) 🏴󠁢󠁴󠀲󠀱󠁿 Flag for Tsirang (BT-21) 🏴󠁢󠁴󠀴󠀳󠁿 Flag for Pemagatshel (BT-43) 👨🏿‍👧🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁷󠁮󠁥󠁿 Flag for North East (BW-NE) 🏴󠁢󠁷󠁫󠁬󠁿 Flag for Kgatleng (BW-KL) 🏴󠁢󠁷󠁫󠁧󠁿 Flag for Kgalagadi (BW-KG) 🏴󠁢󠁷󠁳󠁥󠁿 Flag for South East (BW-SE) 🏴󠁢󠁷󠁫󠁷󠁿 Flag for Kweneng (BW-KW) 👨🏻‍👧🏻‍👦🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁢󠁷󠁮󠁷󠁿 Flag for North West (BW-NW) 🏴󠁢󠁷󠁪󠁷󠁿 Flag for Jwaneng (BW-JW) 🏴󠁢󠁳󠁭󠁣󠁿 Flag for Mangrove Cay (BS-MC) 👩🏼‍❤️‍💋‍👩🏿 Kiss - Woman: Medium-Light Skin Tone, Woman: Dark Skin Tone 🏴󠁢󠁷󠁧󠁨󠁿 Flag for Ghanzi (BW-GH) 👨🏻‍❤️‍👩🏻 Couple With Heart - Man: Light Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁪󠁡󠁱󠁿 Flag for Atlantique (BJ-AQ) 👨🏼‍👧🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 👨🏾‍👧🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 👨🏿‍👧🏿‍👦🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁢󠁷󠁳󠁯󠁿 Flag for Southern (BW-SO) 👨🏽‍👧🏽‍👦🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Boy: Medium Skin Tone 👩🏾‍❤️‍👩 Couple With Heart - Woman: Medium-Dark Skin Tone, Woman 👨‍👩‍👶‍👧 Family: Man, Woman, Baby, Girl 👨🏽‍❤️‍💋‍👨🏾 Kiss - Man: Medium Skin Tone, Man: Medium-Dark Skin Tone 🏴󠁢󠁷󠁳󠁴󠁿 Flag for Sowa Town (BW-ST) 🏴󠁢󠁷󠁳󠁰󠁿 Flag for Selibe Phikwe (BW-SP) 👩🏿‍❤️‍👩🏾 Couple With Heart - Woman: Dark Skin Tone, Woman: Medium-Dark Skin Tone 👩‍👨‍👦‍👦 Family: Woman, Man, Boy, Boy 👩🏿‍👨🏿‍👦🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁢󠁹󠁨󠁭󠁿 Flag for Minsk (BY-HM) 🏴󠁢󠁹󠁨󠁯󠁿 Flag for Homel (BY-HO) 👨🏻‍👦🏻‍👦🏻 Family - Man: Light Skin Tone, Boy: Light Skin Tone, Boy: Light Skin Tone 👨🏻‍👩🏻‍👧🏻‍👦🏻 Family - Man: Light Skin Tone, Woman: Light Skin Tone, Girl: Light Skin Tone, Boy: Light Skin Tone 🏴󠁴󠁲󠀳󠀵󠁿 Flag for Izmir (TR-35) 🏴󠁢󠁹󠁨󠁲󠁿 Flag for Hrodna (BY-HR) 🏴󠁢󠁹󠁭󠁡󠁿 Flag for Magileu (BY-MA) 🏴󠁢󠁹󠁭󠁩󠁿 Flag for Minsk Region (BY-MI) 👨🏼‍❤️‍💋‍👩🏿 Kiss - Man: Medium-Light 
Skin Tone, Woman: Dark Skin Tone 👨🏾‍❤️‍👩🏻 Couple With Heart - Man: Medium-Dark Skin Tone, Woman: Light Skin Tone 🏴󠁢󠁺󠁢󠁺󠁿 Flag for Belize (BZ-BZ) 🏴󠁢󠁷󠁬󠁯󠁿 Flag for Lobatse (BW-LO) 👩‍👦‍👧 Family: Woman, Boy, Girl 👨🏼‍👧🏼‍👧🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁢󠁳󠁭󠁩󠁿 Flag for Moore’s Island (BS-MI) 🏴󠁢󠁪󠁭󠁯󠁿 Flag for Mono (BJ-MO) 👨🏽‍👧🏽‍👧🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁢󠁹󠁶󠁩󠁿 Flag for Vitebsk (BY-VI) 🏴󠁢󠁺󠁳󠁣󠁿 Flag for Stann Creek (BZ-SC) 👨🏾‍👧🏾‍👧🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁢󠁺󠁣󠁺󠁬󠁿 Flag for Corozal (BZ-CZL) 👨🏻‍👧🏻‍👶🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Baby: Light Skin Tone 👨🏿‍👧🏿‍👧🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁢󠁺󠁴󠁯󠁬󠁿 Flag for Toledo (BZ-TOL) 🏴󠁮󠁰󠀵󠁿 Flag for Sudur Pashchimanchal (NP-5) 🏴󠁢󠁳󠁨󠁩󠁿 Flag for Harbour Island (BS-HI) 🏴󠁣󠁡󠁡󠁢󠁿 Flag for Alberta (CA-AB) 👩🏾‍❤️‍👨🏾 Couple With Heart - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone 👨🏽‍❤️‍💋‍👨🏼 Kiss - Man: Medium Skin Tone, Man: Medium-Light Skin Tone 🏴󠁬󠁡󠁶󠁩󠁿 Flag for Vientiane Province (LA-VI) 👨‍👩‍👦‍👧 Family: Man, Woman, Boy, Girl 👨🏻‍👧🏻‍👧🏻 Family - Man: Light Skin Tone, Girl: Light Skin Tone, Girl: Light Skin Tone 👨🏼‍👧🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 👨🏽‍👧🏽‍👶🏽 Family - Man: Medium Skin Tone, Girl: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁣󠁡󠁰󠁥󠁿 Flag for Prince Edward Island (CA-PE) 🏴󠁣󠁤󠁫󠁧󠁿 Flag for Kwango (CD-KG) 🏴󠁣󠁡󠁮󠁳󠁿 Flag for Nova Scotia (CA-NS) 👨🏾‍👧🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁣󠁤󠁨󠁵󠁿 Flag for Haut-Uélé (CD-HU) 🏴󠁣󠁤󠁢󠁣󠁿 Flag for Bas-Congo (CD-BC) 🏴󠁣󠁤󠁳󠁵󠁿 Flag for Sud-Ubangi (CD-SU) 🏴󠁣󠁤󠁭󠁡󠁿 Flag for Maniema (CD-MA) 🏴󠁣󠁤󠁳󠁡󠁿 Flag for Sankuru (CD-SA) 🏴󠁣󠁤󠁴󠁵󠁿 Flag for Tshuapa (CD-TU) 🏴󠁣󠁡󠁹󠁴󠁿 Flag for Yukon (CA-YT) 🏴󠁣󠁤󠁭󠁯󠁿 Flag for Mongala (CD-MO) 🏴󠁣󠁦󠁢󠁢󠁿 Flag for 
Bamingui-Bangoran (CF-BB) 🏴󠁣󠁤󠁭󠁮󠁿 Flag for Mai-Ndombe (CD-MN) 🏴󠁣󠁡󠁮󠁵󠁿 Flag for Nunavut (CA-NU) 🏴󠁣󠁤󠁫󠁬󠁿 Flag for Kwilu (CD-KL) 🏴󠁣󠁡󠁮󠁢󠁿 Flag for New Brunswick (CA-NB) 🏴󠁣󠁦󠁢󠁧󠁦󠁿 Flag for Bangui (CF-BGF) 🏴󠁣󠁤󠁫󠁮󠁿 Flag for Kinshasa (CD-KN) 🏴󠁣󠁤󠁮󠁫󠁿 Flag for North Kivu (CD-NK) 🏴󠁣󠁡󠁮󠁴󠁿 Flag for Northwest Territories (CA-NT) 🏴󠁣󠁤󠁴󠁯󠁿 Flag for Tshopo (CD-TO) 🏴󠁣󠁤󠁢󠁵󠁿 Flag for Bas-Uélé (CD-BU) 🏴󠁣󠁤󠁨󠁬󠁿 Flag for Haut-Lomami (CD-HL) 🏴󠁣󠁤󠁨󠁫󠁿 Flag for Haut-Katanga (CD-HK) 🏴󠁣󠁤󠁫󠁥󠁿 Flag for Kasaï-Oriental (CD-KE) 🏴󠁣󠁤󠁳󠁫󠁿 Flag for South Kivu (CD-SK) 🏴󠁣󠁡󠁯󠁮󠁿 Flag for Ontario (CA-ON) 🏴󠁣󠁦󠁡󠁣󠁿 Flag for Ouham (CF-AC) 🏴󠁣󠁦󠁨󠁳󠁿 Flag for Mambéré-Kadéï (CF-HS) 🏴󠁣󠁤󠁫󠁣󠁿 Flag for Kasaï Central (CD-KC) 🏴󠁣󠁤󠁮󠁵󠁿 Flag for Nord-Ubangi (CD-NU) 🏴󠁣󠁤󠁫󠁳󠁿 Flag for Kasaï (CD-KS) 🏴󠁣󠁤󠁩󠁴󠁿 Flag for Ituri (CD-IT) 🏴󠁣󠁨󠁢󠁥󠁿 Flag for Bern (CH-BE) 🏴󠁣󠁧󠀲󠁿 Flag for Lékoumou (CG-2) 🏴󠁣󠁨󠁡󠁩󠁿 Flag for Appenzell Innerrhoden (CH-AI) 🏴󠁣󠁦󠁭󠁰󠁿 Flag for Ombella-M’Poko (CF-MP) 👨🏻‍👶🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone 🏴󠁣󠁦󠁫󠁧󠁿 Flag for Kémo (CF-KG) 🏴󠁣󠁧󠀱󠀳󠁿 Flag for Sangha (CG-13) 🏴󠁣󠁨󠁬󠁵󠁿 Flag for Lucerne (CH-LU) 🏴󠁣󠁨󠁧󠁥󠁿 Flag for Geneva (CH-GE) 🏴󠁣󠁨󠁮󠁷󠁿 Flag for Nidwalden (CH-NW) 🏴󠁣󠁧󠀵󠁿 Flag for Kouilou (CG-5) 🏴󠁣󠁧󠀷󠁿 Flag for Likouala (CG-7) 🏴󠁣󠁧󠁢󠁺󠁶󠁿 Flag for Brazzaville (CG-BZV) 🏴󠁣󠁨󠁳󠁨󠁿 Flag for Schaffhausen (CH-SH) 🏴󠁣󠁤󠁬󠁯󠁿 Flag for Lomami (CD-LO) 🏴󠁣󠁨󠁡󠁲󠁿 Flag for Appenzell Ausserrhoden (CH-AR) 🏴󠁣󠁨󠁳󠁺󠁿 Flag for Schwyz (CH-SZ) 🏴󠁣󠁨󠁮󠁥󠁿 Flag for Neuchâtel (CH-NE) 🏴󠁣󠁦󠁯󠁰󠁿 Flag for Ouham-Pendé (CF-OP) 🏴󠁣󠁨󠁧󠁲󠁿 Flag for Graubünden (CH-GR) 🏴󠁣󠁨󠁳󠁯󠁿 Flag for Solothurn (CH-SO) 🏴󠁣󠁨󠁦󠁲󠁿 Flag for Fribourg (CH-FR) 🏴󠁣󠁧󠀱󠀴󠁿 Flag for Plateaux (CG-14) 🏴󠁣󠁦󠁳󠁥󠁿 Flag for Sangha-Mbaéré (CF-SE) 👨🏿‍👧🏿‍👶🏿 Family - Man: Dark Skin Tone, Girl: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁣󠁨󠁡󠁧󠁿 Flag for Aargau (CH-AG) 🏴󠁣󠁧󠀱󠀵󠁿 Flag for Cuvette-Ouest (CG-15) 🏴󠁣󠁨󠁳󠁧󠁿 Flag for St. 
Gallen (CH-SG) 🏴󠁣󠁧󠀸󠁿 Flag for Cuvette (CG-8) 🏴󠁣󠁨󠁯󠁷󠁿 Flag for Obwalden (CH-OW) 🏴󠁣󠁨󠁢󠁳󠁿 Flag for Basel-Stadt (CH-BS) 🏴󠁣󠁦󠁬󠁢󠁿 Flag for Lobaye (CF-LB) 🏴󠁣󠁬󠁶󠁳󠁿 Flag for Valparaíso (CL-VS) 🏴󠁣󠁭󠁮󠁷󠁿 Flag for Northwest (CM-NW) 🏴󠁣󠁩󠁤󠁮󠁿 Flag for Denguélé (CI-DN) 🏴󠁣󠁭󠁮󠁯󠁿 Flag for North (CM-NO) 🏴󠁣󠁩󠁹󠁭󠁿 Flag for Yamoussoukro (CI-YM) 🏴󠁣󠁭󠁥󠁳󠁿 Flag for East (CM-ES) 👨🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁣󠁩󠁷󠁲󠁿 Flag for Woroba (CI-WR) 🏴󠁣󠁩󠁬󠁧󠁿 Flag for Lagunes (CI-LG) 🏴󠁣󠁩󠁧󠁤󠁿 Flag for Gôh-Djiboua (CI-GD) 🏴󠁣󠁩󠁣󠁭󠁿 Flag for Comoé (CI-CM) 🏴󠁣󠁭󠁳󠁷󠁿 Flag for Southwest (CM-SW) 🏴󠁣󠁬󠁢󠁩󠁿 Flag for Bío Bío (CL-BI) 🏴󠁣󠁬󠁡󠁩󠁿 Flag for Aysén (CL-AI) 🏴󠁣󠁬󠁲󠁭󠁿 Flag for Santiago Metropolitan (CL-RM) 🏴󠁣󠁬󠁴󠁡󠁿 Flag for Tarapacá (CL-TA) 🏴󠁣󠁭󠁳󠁵󠁿 Flag for South (CM-SU) 🏴󠁣󠁬󠁡󠁴󠁿 Flag for Atacama (CL-AT) 🏴󠁣󠁮󠀱󠀲󠁿 Flag for Tianjin (CN-12) 🏴󠁣󠁩󠁬󠁣󠁿 Flag for Lacs (CI-LC) 🏴󠁣󠁬󠁣󠁯󠁿 Flag for Coquimbo (CL-CO) 🏴󠁣󠁬󠁡󠁰󠁿 Flag for Arica y Parinacota (CL-AP) 🏴󠁣󠁭󠁬󠁴󠁿 Flag for Littoral (CM-LT) 🏴󠁣󠁭󠁣󠁥󠁿 Flag for Centre (CM-CE) 🏴󠁣󠁭󠁥󠁮󠁿 Flag for Far North (CM-EN) 🏴󠁣󠁬󠁭󠁡󠁿 Flag for Magallanes Region (CL-MA) 🏴󠁣󠁬󠁭󠁬󠁿 Flag for Maule (CL-ML) 🏴󠁣󠁩󠁭󠁧󠁿 Flag for Montagnes (CI-MG) 🏴󠁣󠁩󠁢󠁳󠁿 Flag for Bas-Sassandra (CI-BS) 🏴󠁣󠁭󠁡󠁤󠁿 Flag for Adamawa (CM-AD) 🏴󠁣󠁬󠁬󠁲󠁿 Flag for Los Ríos (CL-LR) 🏴󠁣󠁭󠁯󠁵󠁿 Flag for West (CM-OU) 🏴󠁣󠁩󠁳󠁶󠁿 Flag for Savanes (CI-SV) 🏴󠁣󠁬󠁬󠁬󠁿 Flag for Los Lagos (CL-LL) 🏴󠁣󠁮󠀳󠀷󠁿 Flag for Shandong (CN-37) 🏴󠁣󠁮󠀶󠀲󠁿 Flag for Gansu (CN-62) 🏴󠁣󠁮󠀳󠀱󠁿 Flag for Shanghai (CN-31) 🏴󠁣󠁮󠀳󠀶󠁿 Flag for Jiangxi (CN-36) 🏴󠁣󠁮󠀷󠀱󠁿 Flag for Taiwan (CN-71) 🏴󠁣󠁯󠁢󠁯󠁹󠁿 Flag for Boyacá (CO-BOY) 🏴󠁣󠁮󠀱󠀱󠁿 Flag for Beijing (CN-11) 🏴󠁢󠁧󠀱󠀸󠁿 Flag for Ruse (BG-18) 🏴󠁣󠁮󠀴󠀴󠁿 Flag for Guangdong (CN-44) 🏴󠁣󠁮󠀶󠀳󠁿 Flag for Qinghai (CN-63) 🏴󠁣󠁮󠀲󠀳󠁿 Flag for Heilongjiang (CN-23) 🏴󠁣󠁮󠀵󠀱󠁿 Flag for Sichuan (CN-51) 🏴󠁣󠁯󠁣󠁡󠁬󠁿 Flag for Caldas (CO-CAL) 🏴󠁣󠁯󠁢󠁯󠁬󠁿 Flag for Bolívar (CO-BOL) 🏴󠁣󠁮󠀵󠀳󠁿 Flag for Yunnan (CN-53) 🏴󠁣󠁯󠁡󠁴󠁬󠁿 Flag for Atlántico (CO-ATL) 🏴󠁣󠁮󠀴󠀲󠁿 Flag for Hubei (CN-42) 🏴󠁣󠁮󠀲󠀲󠁿 Flag for Jilin (CN-22) 🏴󠁣󠁯󠁣󠁡󠁱󠁿 Flag for Caquetá (CO-CAQ) 🏴󠁣󠁮󠀳󠀳󠁿 Flag for Zhejiang (CN-33) 🏴󠁣󠁮󠀱󠀳󠁿 Flag for Hebei (CN-13) 
🏴󠁣󠁮󠀱󠀵󠁿 Flag for Inner Mongolia (CN-15) 🏴󠁣󠁮󠀴󠀳󠁿 Flag for Hunan (CN-43) 🏴󠁣󠁦󠁨󠁫󠁿 Flag for Haute-Kotto (CF-HK) 🏴󠁣󠁮󠀶󠀵󠁿 Flag for Xinjiang (CN-65) 🏴󠁣󠁮󠀵󠀰󠁿 Flag for Chongqing (CN-50) 🏴󠁣󠁮󠀴󠀵󠁿 Flag for Guangxi (CN-45) 🏴󠁣󠁮󠀵󠀴󠁿 Flag for Tibet (CN-54) 🏴󠁣󠁮󠀳󠀲󠁿 Flag for Jiangsu (CN-32) 🏴󠁣󠁯󠁡󠁲󠁡󠁿 Flag for Arauca (CO-ARA) 🏴󠁣󠁮󠀳󠀵󠁿 Flag for Fujian (CN-35) 🏴󠁣󠁮󠀴󠀱󠁿 Flag for Henan (CN-41) 🏴󠁣󠁮󠀴󠀶󠁿 Flag for Hainan (CN-46) 🏴󠁣󠁮󠀱󠀴󠁿 Flag for Shanxi (CN-14) 🏴󠁣󠁯󠁭󠁡󠁧󠁿 Flag for Magdalena (CO-MAG) 🏴󠁣󠁯󠁣󠁨󠁯󠁿 Flag for Chocó (CO-CHO) 🏴󠁣󠁯󠁧󠁵󠁡󠁿 Flag for Guainía (CO-GUA) 🏴󠁣󠁯󠁣󠁯󠁲󠁿 Flag for Córdoba (CO-COR) 🏴󠁣󠁯󠁰󠁵󠁴󠁿 Flag for Putumayo (CO-PUT) 🏴󠁣󠁯󠁳󠁡󠁮󠁿 Flag for Santander (CO-SAN) 🏴󠁣󠁵󠀰󠀵󠁿 Flag for Villa Clara (CU-05) 🏴󠁣󠁯󠁶󠁡󠁣󠁿 Flag for Valle del Cauca (CO-VAC) 🏴󠁣󠁯󠁱󠁵󠁩󠁿 Flag for Quindío (CO-QUI) 🏴󠁣󠁯󠁲󠁩󠁳󠁿 Flag for Risaralda (CO-RIS) 🏴󠁣󠁯󠁣󠁵󠁮󠁿 Flag for Cundinamarca (CO-CUN) 👨🏽‍👶🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁣󠁲󠁡󠁿 Flag for Alajuela (CR-A) 🏴󠁣󠁲󠁰󠁿 Flag for Puntarenas (CR-P) 🏴󠁣󠁯󠁨󠁵󠁩󠁿 Flag for Huila (CO-HUI) 🏴󠁣󠁯󠁶󠁡󠁵󠁿 Flag for Vaupés (CO-VAU) 🏴󠁣󠁯󠁣󠁡󠁵󠁿 Flag for Cauca (CO-CAU) 🏴󠁣󠁵󠀰󠀷󠁿 Flag for Sancti Spíritus (CU-07) 🏴󠁣󠁲󠁬󠁿 Flag for Limón (CR-L) 🏴󠁣󠁯󠁮󠁳󠁡󠁿 Flag for Norte de Santander (CO-NSA) 🏴󠁣󠁵󠀰󠀴󠁿 Flag for Matanzas (CU-04) 🏴󠁣󠁲󠁧󠁿 Flag for Guanacaste (CR-G) 🏴󠁣󠁵󠀰󠀳󠁿 Flag for Havana (CU-03) 👩🏾‍❤️‍💋‍👨 Kiss - Woman: Medium-Dark Skin Tone, Man 🏴󠁣󠁵󠀰󠀸󠁿 Flag for Ciego de Ávila (CU-08) 🏴󠁣󠁯󠁴󠁯󠁬󠁿 Flag for Tolima (CO-TOL) 🏴󠁣󠁵󠀰󠀹󠁿 Flag for Camagüey (CU-09) 🏴󠁣󠁵󠀰󠀶󠁿 Flag for Cienfuegos (CU-06) 🏴󠁣󠁯󠁧󠁵󠁶󠁿 Flag for Guaviare (CO-GUV) 🏴󠁢󠁺󠁣󠁹󠁿 Flag for Cayo (BZ-CY) 🏴󠁥󠁴󠁳󠁮󠁿 Flag for Southern Nations, Nationalities, and Peoples (ET-SN) 🏴󠁣󠁵󠀰󠀱󠁿 Flag for Pinar del Río (CU-01) 🏴󠁣󠁲󠁳󠁪󠁿 Flag for San José (CR-SJ) 🏴󠁣󠁲󠁣󠁿 Flag for Cartago (CR-C) 🏴󠁣󠁯󠁬󠁡󠁧󠁿 Flag for La Guajira (CO-LAG) 🏴󠁣󠁹󠀰󠀲󠁿 Flag for Limassol (CY-02) 🏴󠁤󠁥󠁮󠁩󠁿 Flag for Lower Saxony (DE-NI) 🏴󠁢󠁺󠁯󠁷󠁿 Flag for Orange Walk (BZ-OW) 🏴󠁣󠁺󠀶󠀳󠁿 Flag for Kraj Vysočina (CZ-63) 🏴󠁣󠁺󠀵󠀱󠁿 Flag for Liberecký kraj (CZ-51) 🏴󠁣󠁵󠀱󠀰󠁿 Flag for Las Tunas (CU-10) 🏴󠁣󠁵󠀱󠀳󠁿 Flag for Santiago de Cuba (CU-13) 👨🏾‍👶🏾 Family - Man: Medium-Dark 
Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁣󠁹󠀰󠀱󠁿 Flag for Nicosia (CY-01) 🏴󠁣󠁺󠀲󠀰󠁿 Flag for Středočeský kraj (CZ-20) 🏴󠁣󠁦󠁶󠁫󠁿 Flag for Vakaga (CF-VK) 🏴󠁣󠁺󠀵󠀲󠁿 Flag for Královéhradecký kraj (CZ-52) 🏴󠁣󠁺󠀴󠀱󠁿 Flag for Karlovarský kraj (CZ-41) 🏴󠁣󠁵󠀱󠀵󠁿 Flag for Artemisa (CU-15) 🏴󠁣󠁹󠀰󠀴󠁿 Flag for Famagusta (CY-04) 🏴󠁤󠁥󠁨󠁢󠁿 Flag for Bremen (DE-HB) 🏴󠁤󠁥󠁨󠁥󠁿 Flag for Hesse (DE-HE) 🏴󠁣󠁵󠀱󠀱󠁿 Flag for Holguín (CU-11) 👨🏿‍👶🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁣󠁺󠀸󠀰󠁿 Flag for Moravskoslezský kraj (CZ-80) 🏴󠁣󠁺󠀳󠀱󠁿 Flag for Jihočeský kraj (CZ-31) 🏴󠁣󠁨󠁧󠁬󠁿 Flag for Glarus (CH-GL) 🏴󠁣󠁺󠀱󠀰󠁿 Flag for Praha, Hlavní mešto (CZ-10) 🏴󠁣󠁹󠀰󠀳󠁿 Flag for Larnaca (CY-03) 🏴󠁤󠁥󠁨󠁨󠁿 Flag for Hamburg (DE-HH) 🏴󠁤󠁥󠁭󠁶󠁿 Flag for Mecklenburg-Vorpommern (DE-MV) 🏴󠁣󠁶󠁢󠁿 Flag for Barlavento Islands (CV-B) 🏴󠁣󠁶󠁳󠁿 Flag for Sotavento Islands (CV-S) 🏴󠁣󠁵󠀱󠀶󠁿 Flag for Mayabeque (CU-16) 🏴󠁣󠁺󠀷󠀱󠁿 Flag for Olomoucký kraj (CZ-71) 🏴󠁣󠁵󠀱󠀴󠁿 Flag for Guantánamo (CU-14) 🏴󠁤󠁥󠁢󠁢󠁿 Flag for Brandenburg (DE-BB) 🏴󠁣󠁺󠀳󠀲󠁿 Flag for Plzeňský kraj (CZ-32) 🏴󠁤󠁪󠁡󠁳󠁿 Flag for Ali Sabieh (DJ-AS) 🏴󠁤󠁥󠁲󠁰󠁿 Flag for Rhineland-Palatinate (DE-RP) 🏴󠁤󠁥󠁳󠁮󠁿 Flag for Saxony (DE-SN) 🏴󠁤󠁫󠀸󠀵󠁿 Flag for Zealand (DK-85) 🏴󠁤󠁥󠁳󠁴󠁿 Flag for Saxony-Anhalt (DE-ST) 🏴󠁤󠁺󠀰󠀲󠁿 Flag for Chlef (DZ-02) 🏴󠁤󠁭󠀰󠀷󠁿 Flag for Saint Luke (DM-07) 🏴󠁤󠁪󠁡󠁲󠁿 Flag for Arta (DJ-AR) 🏴󠁤󠁫󠀸󠀴󠁿 Flag for Capital Region (DK-84) 🏴󠁤󠁭󠀱󠀰󠁿 Flag for Saint Paul (DM-10) 🏴󠁤󠁯󠀳󠀶󠁿 Flag for Cibao Sur (DO-36) 🏴󠁤󠁯󠀳󠀸󠁿 Flag for Enriquillo (DO-38) 🏴󠁤󠁭󠀰󠀹󠁿 Flag for Saint Patrick (DM-09) 🏴󠁤󠁯󠀳󠀴󠁿 Flag for Cibao Noroeste (DO-34) 🏴󠁤󠁯󠀳󠀳󠁿 Flag for Cibao Nordeste (DO-33) 🏴󠁤󠁭󠀰󠀵󠁿 Flag for Saint John (DM-05) 🏴󠁤󠁯󠀴󠀲󠁿 Flag for Yuma (DO-42) 🏴󠁤󠁪󠁯󠁢󠁿 Flag for Obock (DJ-OB) 🏴󠁤󠁥󠁴󠁨󠁿 Flag for Thuringia (DE-TH) 🏴󠁤󠁯󠀴󠀰󠁿 Flag for Ozama (DO-40) 🏴󠁤󠁥󠁳󠁬󠁿 Flag for Saarland (DE-SL) 🏴󠁤󠁭󠀰󠀴󠁿 Flag for Saint George (DM-04) 🏴󠁤󠁭󠀰󠀳󠁿 Flag for Saint David (DM-03) 🏴󠁤󠁭󠀰󠀲󠁿 Flag for Saint Andrew (DM-02) 🏴󠁤󠁪󠁤󠁩󠁿 Flag for Dikhil (DJ-DI) 🏴󠁤󠁭󠀰󠀸󠁿 Flag for Saint Mark (DM-08) 🏴󠁤󠁪󠁴󠁡󠁿 Flag for Tadjourah (DJ-TA) 🏴󠁤󠁭󠀱󠀱󠁿 Flag for Saint Peter (DM-11) 🏴󠁤󠁯󠀴󠀱󠁿 Flag for Valdesia (DO-41) 🏴󠁤󠁯󠀳󠀹󠁿 Flag for 
Higüamo (DO-39) 🏴󠁤󠁺󠀰󠀳󠁿 Flag for Laghouat (DZ-03) 🏴󠁤󠁺󠀲󠀸󠁿 Flag for M’Sila (DZ-28) 🏴󠁤󠁺󠀳󠀳󠁿 Flag for Illizi (DZ-33) 👩🏿‍👨🏿‍👧🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁤󠁺󠀱󠀵󠁿 Flag for Tizi Ouzou (DZ-15) 🏴󠁤󠁺󠀱󠀴󠁿 Flag for Tiaret (DZ-14) 🏴󠁤󠁺󠀱󠀹󠁿 Flag for Sétif (DZ-19) 🏴󠁤󠁺󠀱󠀷󠁿 Flag for Djelfa (DZ-17) 🏴󠁤󠁺󠀲󠀵󠁿 Flag for Constantine (DZ-25) 🏴󠁤󠁺󠀲󠀴󠁿 Flag for Guelma (DZ-24) 🏴󠁤󠁺󠀴󠀲󠁿 Flag for Tipasa (DZ-42) 🏴󠁤󠁺󠀰󠀵󠁿 Flag for Batna (DZ-05) 🏴󠁤󠁺󠀱󠀲󠁿 Flag for Tébessa (DZ-12) 🏴󠁤󠁺󠀰󠀷󠁿 Flag for Biskra (DZ-07) 🏴󠁤󠁺󠀳󠀰󠁿 Flag for Ouargla (DZ-30) 🏴󠁤󠁺󠀲󠀲󠁿 Flag for Sidi Bel Abbès (DZ-22) 🏴󠁤󠁺󠀱󠀱󠁿 Flag for Tamanghasset (DZ-11) 🏴󠁤󠁺󠀲󠀶󠁿 Flag for Médéa (DZ-26) 🏴󠁤󠁺󠀳󠀲󠁿 Flag for El Bayadh (DZ-32) 🏴󠁤󠁺󠀴󠀰󠁿 Flag for Khenchela (DZ-40) 🏴󠁤󠁺󠀳󠀸󠁿 Flag for Tissemsilt (DZ-38) 🏴󠁤󠁺󠀳󠀹󠁿 Flag for El Oued (DZ-39) 🏴󠁤󠁺󠀴󠀱󠁿 Flag for Souk Ahras (DZ-41) 🏴󠁤󠁺󠀱󠀳󠁿 Flag for Tlemcen (DZ-13) 🏴󠁤󠁺󠀰󠀶󠁿 Flag for Béjaïa (DZ-06) 🏴󠁤󠁺󠀴󠀳󠁿 Flag for Mila (DZ-43) 🏴󠁤󠁺󠀲󠀰󠁿 Flag for Saïda (DZ-20) 🏴󠁤󠁺󠀳󠀱󠁿 Flag for Oran (DZ-31) 🏴󠁤󠁺󠀱󠀰󠁿 Flag for Bouira (DZ-10) 🏴󠁤󠁺󠀳󠀵󠁿 Flag for Boumerdès (DZ-35) 🏴󠁤󠁺󠀳󠀶󠁿 Flag for El Tarf (DZ-36) 🏴󠁤󠁺󠀱󠀶󠁿 Flag for Algiers (DZ-16) 🏴󠁤󠁺󠀳󠀷󠁿 Flag for Tindouf (DZ-37) 🏴󠁤󠁺󠀲󠀳󠁿 Flag for Annaba (DZ-23) 🏴󠁤󠁺󠀰󠀹󠁿 Flag for Blida (DZ-09) 🏴󠁤󠁺󠀰󠀴󠁿 Flag for Oum El Bouaghi (DZ-04) 🏴󠁤󠁺󠀲󠀷󠁿 Flag for Mostaganem (DZ-27) 🏴󠁥󠁣󠁨󠁿 Flag for Chimborazo (EC-H) 🏴󠁤󠁺󠀴󠀷󠁿 Flag for Ghardaïa (DZ-47) 🏴󠁥󠁣󠁢󠁿 Flag for Bolívar (EC-B) 🏴󠁥󠁣󠁣󠁿 Flag for Carchi (EC-C) 🏴󠁤󠁺󠀴󠀴󠁿 Flag for Aïn Defla (DZ-44) 🏴󠁣󠁹󠀰󠀵󠁿 Flag for Paphos (CY-05) 🏴󠁤󠁺󠀴󠀸󠁿 Flag for Relizane (DZ-48) 🏴󠁥󠁣󠁳󠁿 Flag for Morona-Santiago (EC-S) 🏴󠁣󠁨󠁪󠁵󠁿 Flag for Jura (CH-JU) 🏴󠁥󠁣󠁳󠁥󠁿 Flag for Santa Elena (EC-SE) 🏴󠁥󠁥󠀵󠀷󠁿 Flag for Lääne (EE-57) 🏴󠁥󠁣󠁩󠁿 Flag for Imbabura (EC-I) 🏴󠁤󠁺󠀴󠀶󠁿 Flag for Aïn Témouchent (DZ-46) 🏴󠁥󠁣󠁷󠁿 Flag for Galápagos (EC-W) 🏴󠁥󠁣󠁮󠁿 Flag for Napo (EC-N) 👨🏽‍👶🏽‍👦🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁥󠁥󠀶󠀷󠁿 Flag for Pärnu (EE-67) 🏴󠁥󠁥󠀷󠀸󠁿 Flag for Tartu (EE-78) 🏴󠁥󠁣󠁡󠁿 Flag for Azuay (EC-A) 🏴󠁥󠁣󠁭󠁿 Flag for Manabí (EC-M) 🏴󠁥󠁣󠁯󠁿 Flag for El Oro (EC-O) 🏴󠁥󠁣󠁰󠁿 Flag for 
Pichincha (EC-P) 🏴󠁥󠁥󠀷󠀰󠁿 Flag for Rapla (EE-70) 🏴󠁥󠁥󠀷󠀴󠁿 Flag for Saare (EE-74) 👨🏾‍👶🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁥󠁥󠀶󠀵󠁿 Flag for Põlva (EE-65) 🏴󠁥󠁣󠁹󠁿 Flag for Pastaza (EC-Y) 🏴󠁥󠁣󠁧󠁿 Flag for Guayas (EC-G) 🏴󠁥󠁣󠁲󠁿 Flag for Los Ríos (EC-R) 🏴󠁥󠁣󠁵󠁿 Flag for Sucumbíos (EC-U) 🏴󠁥󠁥󠀴󠀹󠁿 Flag for Jõgeva (EE-49) 🏴󠁥󠁥󠀸󠀲󠁿 Flag for Valga (EE-82) 🏴󠁥󠁣󠁬󠁿 Flag for Loja (EC-L) 🏴󠁥󠁣󠁤󠁿 Flag for Orellana (EC-D) 👨🏼‍👶🏼‍👦🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Boy: Medium-Light Skin Tone 🏴󠁤󠁺󠀴󠀵󠁿 Flag for Naama (DZ-45) 🏴󠁥󠁥󠀵󠀱󠁿 Flag for Järva (EE-51) 🏴󠁥󠁧󠁳󠁩󠁮󠁿 Flag for North Sinai (EG-SIN) 🏴󠁥󠁧󠁪󠁳󠁿 Flag for South Sinai (EG-JS) 🏴󠁥󠁧󠁫󠁮󠁿 Flag for Qena (EG-KN) 🏴󠁥󠁥󠀸󠀴󠁿 Flag for Viljandi (EE-84) 🏴󠁥󠁧󠁩󠁳󠁿 Flag for Ismailia (EG-IS) 🏴󠁥󠁧󠁡󠁳󠁮󠁿 Flag for Aswan (EG-ASN) 🏴󠁥󠁧󠁤󠁫󠁿 Flag for Dakahlia (EG-DK) 🏴󠁥󠁧󠁧󠁨󠁿 Flag for Gharbia (EG-GH) 🏴󠁥󠁧󠁢󠁨󠁿 Flag for Beheira (EG-BH) 🏴󠁥󠁥󠀸󠀶󠁿 Flag for Võru (EE-86) 🏴󠁥󠁧󠁡󠁳󠁴󠁿 Flag for Asyut (EG-AST) 🏴󠁥󠁧󠁫󠁢󠁿 Flag for Qalyubia (EG-KB) 🏴󠁥󠁧󠁧󠁺󠁿 Flag for Giza (EG-GZ) 👨🏿‍👶🏿‍👦🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁥󠁲󠁡󠁮󠁿 Flag for Anseba (ER-AN) 🏴󠁥󠁧󠁫󠁦󠁳󠁿 Flag for Kafr el-Sheikh (EG-KFS) 🏴󠁥󠁧󠁭󠁴󠁿 Flag for Matrouh (EG-MT) 🏴󠁥󠁲󠁧󠁢󠁿 Flag for Gash-Barka (ER-GB) 🏴󠁥󠁧󠁭󠁮󠁿 Flag for Minya (EG-MN) 🏴󠁥󠁧󠁡󠁬󠁸󠁿 Flag for Alexandria (EG-ALX) 🏴󠁥󠁲󠁤󠁫󠁿 Flag for Southern Red Sea (ER-DK) 🏴󠁥󠁧󠁰󠁴󠁳󠁿 Flag for Port Said (EG-PTS) 🏴󠁥󠁧󠁳󠁨󠁧󠁿 Flag for Sohag (EG-SHG) 🏴󠁥󠁧󠁷󠁡󠁤󠁿 Flag for New Valley (EG-WAD) 🏴󠁥󠁲󠁳󠁫󠁿 Flag for Northern Red Sea (ER-SK) 🏴󠁥󠁧󠁳󠁵󠁺󠁿 Flag for Suez (EG-SUZ) 🏴󠁥󠁧󠁭󠁮󠁦󠁿 Flag for Monufia (EG-MNF) 🏴󠁥󠁧󠁬󠁸󠁿 Flag for Luxor (EG-LX) 🏴󠁥󠁲󠁭󠁡󠁿 Flag for Maekel (ER-MA) 🏴󠁥󠁧󠁤󠁴󠁿 Flag for Damietta (EG-DT) 🏴󠁥󠁧󠁳󠁨󠁲󠁿 Flag for Al Sharqia (EG-SHR) 🏴󠁥󠁧󠁦󠁹󠁭󠁿 Flag for Faiyum (EG-FYM) 🏴󠁥󠁲󠁤󠁵󠁿 Flag for Debub (ER-DU) 🏴󠁥󠁳󠁡󠁲󠁿 Flag for Aragon (ES-AR) 🏴󠁣󠁮󠀳󠀴󠁿 Flag for Anhui (CN-34) 🏴󠁤󠁫󠀸󠀱󠁿 Flag for Northern Denmark (DK-81) 👨🏻‍👶🏻‍👧🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Girl: Light Skin Tone 👨🏼‍👶🏼‍👧🏼 Family - Man: Medium-Light Skin 
Tone, Baby: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 👨🏽‍👶🏽‍👧🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁥󠁴󠁴󠁩󠁿 Flag for Tigray (ET-TI) 🏴󠁣󠁮󠀲󠀱󠁿 Flag for Liaoning (CN-21) 🏴󠁥󠁴󠁧󠁡󠁿 Flag for Gambela (ET-GA) 🏴󠁥󠁳󠁭󠁬󠁿 Flag for Melilla (ES-ML) 🏴󠁥󠁳󠁭󠁣󠁿 Flag for Murcia Region (ES-MC) 🏴󠁦󠁩󠀱󠀰󠁿 Flag for Lapland (FI-10) 🏴󠁦󠁩󠀰󠀷󠁿 Flag for Central Ostrobothnia (FI-07) 🏴󠁥󠁴󠁡󠁭󠁿 Flag for Amhara (ET-AM) 🏴󠁥󠁴󠁢󠁥󠁿 Flag for Benishangul-Gumuz (ET-BE) 🏴󠁥󠁴󠁯󠁲󠁿 Flag for Oromia (ET-OR) 🏴󠁥󠁳󠁲󠁩󠁿 Flag for La Rioja (ES-RI) 🏴󠁤󠁪󠁤󠁪󠁿 Flag for Djibouti (DJ-DJ) 🏴󠁥󠁳󠁭󠁤󠁿 Flag for Madrid Autonomous Community (ES-MD) 🏴󠁥󠁴󠁤󠁤󠁿 Flag for Dire Dawa (ET-DD) 🏴󠁤󠁺󠀲󠀹󠁿 Flag for Mascara (DZ-29) 🏴󠁦󠁩󠀰󠀵󠁿 Flag for Kainuu (FI-05) 🏴󠁦󠁩󠀰󠀹󠁿 Flag for Kymenlaakso (FI-09) 🏴󠁦󠁩󠀰󠀳󠁿 Flag for Southern Ostrobothnia (FI-03) 🏴󠁦󠁩󠀱󠀱󠁿 Flag for Pirkanmaa (FI-11) 🏴󠁦󠁩󠀰󠀴󠁿 Flag for Southern Savonia (FI-04) 🏴󠁦󠁩󠀱󠀳󠁿 Flag for North Karelia (FI-13) 🏴󠁦󠁩󠀰󠀲󠁿 Flag for South Karelia (FI-02) 🏴󠁥󠁴󠁨󠁡󠁿 Flag for Harari (ET-HA) 🏴󠁣󠁺󠀷󠀲󠁿 Flag for Zlínský kraj (CZ-72) 🏴󠁥󠁴󠁳󠁯󠁿 Flag for Somali (ET-SO) 🏴󠁥󠁳󠁣󠁴󠁿 Flag for Catalonia (ES-CT) 🏴󠁦󠁭󠁫󠁳󠁡󠁿 Flag for Kosrae (FM-KSA) 🏴󠁦󠁲󠁮󠁣󠁿 Flag for New Caledonia (FR-NC) 🏴󠁦󠁲󠁯󠁣󠁣󠁿 Flag for Occitanie (FR-OCC) 🏴󠁦󠁲󠁰󠁡󠁣󠁿 Flag for Provence-Alpes-Côte-d’Azur (FR-PAC) 🏴󠁦󠁩󠀱󠀵󠁿 Flag for Northern Savonia (FI-15) 🏴󠁦󠁭󠁴󠁲󠁫󠁿 Flag for Chuuk (FM-TRK) 🏴󠁦󠁲󠁢󠁦󠁣󠁿 Flag for Bourgogne-Franche-Comté (FR-BFC) 🏴󠁦󠁩󠀱󠀴󠁿 Flag for Northern Ostrobothnia (FI-14) 🏴󠁦󠁪󠁲󠁿 Flag for Rotuma (FJ-R) 🏴󠁦󠁲󠁭󠁡󠁹󠁿 Flag for Mayotte (FR-MAY) 🏴󠁦󠁲󠁮󠁡󠁱󠁿 Flag for Nouvelle-Aquitaine (FR-NAQ) 🏴󠁦󠁪󠁣󠁿 Flag for Central (FJ-C) 🏴󠁦󠁲󠁧󠁥󠁳󠁿 Flag for Grand-Est (FR-GES) 🏴󠁦󠁪󠁮󠁿 Flag for Northern (FJ-N) 🏴󠁦󠁲󠁧󠁵󠁡󠁿 Flag for Guadeloupe (FR-GUA) 🏴󠁦󠁭󠁹󠁡󠁰󠁿 Flag for Yap (FM-YAP) 🏴󠁦󠁲󠁢󠁲󠁥󠁿 Flag for Bretagne (FR-BRE) 🏴󠁦󠁲󠁰󠁦󠁿 Flag for French Polynesia (FR-PF) 🏴󠁦󠁲󠁮󠁯󠁲󠁿 Flag for Normandie (FR-NOR) 🏴󠁦󠁲󠁧󠁦󠁿 Flag for French Guiana (FR-GF) 🏴󠁦󠁲󠁣󠁶󠁬󠁿 Flag for Centre-Val de Loire (FR-CVL) 🏴󠁦󠁲󠁣󠁰󠁿 Flag for Clipperton Island (FR-CP) 🏴󠁦󠁲󠁭󠁦󠁿 Flag for St. 
Martin (FR-MF) 🏴󠁦󠁩󠀱󠀶󠁿 Flag for Päijänne Tavastia (FI-16) 🏴󠁦󠁩󠀱󠀹󠁿 Flag for Southwest Finland (FI-19) 🏴󠁦󠁲󠁬󠁲󠁥󠁿 Flag for La Réunion (FR-LRE) 🏴󠁦󠁩󠀱󠀷󠁿 Flag for Satakunta (FI-17) 🏴󠁧󠁥󠁳󠁫󠁿 Flag for Shida Kartli (GE-SK) 🏴󠁧󠁡󠀳󠁿 Flag for Moyen-Ogooué (GA-3) 👨🏿‍👶🏿‍👧🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁧󠁤󠀰󠀳󠁿 Flag for Saint George (GD-03) 🏴󠁧󠁡󠀵󠁿 Flag for Nyanga (GA-5) 🏴󠁧󠁡󠀶󠁿 Flag for Ogooué-Ivindo (GA-6) 🏴󠁧󠁨󠁢󠁡󠁿 Flag for Brong-Ahafo (GH-BA) 🏴󠁧󠁡󠀲󠁿 Flag for Haut-Ogooué (GA-2) 🏴󠁧󠁤󠀰󠀱󠁿 Flag for Saint Andrew (GD-01) 🏴󠁧󠁤󠀰󠀶󠁿 Flag for Saint Patrick (GD-06) 🏴󠁥󠁳󠁧󠁡󠁿 Flag for Galicia (ES-GA) 🏴󠁦󠁲󠁷󠁦󠁿 Flag for Wallis & Futuna (FR-WF) 👨🏻‍👶🏻‍👶🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Baby: Light Skin Tone 🏴󠁦󠁲󠁰󠁭󠁿 Flag for St. Pierre & Miquelon (FR-PM) 🏴󠁧󠁤󠀰󠀴󠁿 Flag for Saint John (GD-04) 🏴󠁧󠁥󠁴󠁢󠁿 Flag for Tbilisi (GE-TB) 👨🏼‍👶🏼‍👶🏼 Family - Man: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone, Baby: Medium-Light Skin Tone 🏴󠁧󠁤󠀰󠀲󠁿 Flag for Saint David (GD-02) 🏴󠁧󠁥󠁧󠁵󠁿 Flag for Guria (GE-GU) 🏴󠁧󠁡󠀹󠁿 Flag for Woleu-Ntem (GA-9) 🏴󠁧󠁥󠁲󠁬󠁿 Flag for Racha-Lechkhumi and Kvemo Svaneti (GE-RL) 🏴󠁧󠁥󠁳󠁪󠁿 Flag for Samtskhe-Javakheti (GE-SJ) 🏴󠁧󠁥󠁭󠁭󠁿 Flag for Mtskheta-Mtianeti (GE-MM) 🏴󠁧󠁥󠁩󠁭󠁿 Flag for Imereti (GE-IM) 🏴󠁧󠁡󠀸󠁿 Flag for Ogooué-Maritime (GA-8) 🏴󠁣󠁮󠀶󠀱󠁿 Flag for Shaanxi (CN-61) 🏴󠁧󠁨󠁡󠁡󠁿 Flag for Greater Accra (GH-AA) 🏴󠁣󠁺󠀶󠀴󠁿 Flag for Jihomoravský kraj (CZ-64) 🏴󠁧󠁥󠁡󠁪󠁿 Flag for Adjara (GE-AJ) 🏴󠁧󠁥󠁳󠁺󠁿 Flag for Samegrelo-Zemo Svaneti (GE-SZ) 🏴󠁧󠁡󠀱󠁿 Flag for Estuaire (GA-1) 🏴󠁧󠁡󠀷󠁿 Flag for Ogooué-Lolo (GA-7) 🏴󠁧󠁮󠁤󠁿 Flag for Kindia Region (GN-D) 🏴󠁧󠁮󠁭󠁿 Flag for Mamou Region (GN-M) 👨🏽‍👶🏽‍👶🏽 Family - Man: Medium Skin Tone, Baby: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁧󠁬󠁱󠁡󠁿 Flag for Qaasuitsup (GL-QA) 🏴󠁧󠁭󠁮󠁿 Flag for North Bank Division (GM-N) 🏴󠁧󠁬󠁳󠁭󠁿 Flag for Sermersooq (GL-SM) 🏴󠁧󠁨󠁮󠁰󠁿 Flag for Northern (GH-NP) 🏴󠁧󠁲󠁦󠁿 Flag for Ionian Islands (GR-F) 🏴󠁧󠁲󠁨󠁿 Flag for Central Greece (GR-H) 🏴󠁧󠁨󠁣󠁰󠁿 Flag for Central (GH-CP) 🏴󠁧󠁮󠁫󠁿 Flag for Kankan Region (GN-K) 🏴󠁧󠁲󠁬󠁿 Flag for South Aegean (GR-L) 
🏴󠁧󠁲󠁩󠁿 Flag for Attica (GR-I) 🏴󠁧󠁭󠁵󠁿 Flag for Upper River Division (GM-U) 🏴󠁧󠁨󠁥󠁰󠁿 Flag for Eastern (GH-EP) 🏴󠁧󠁮󠁮󠁿 Flag for Nzérékoré Region (GN-N) 🏴󠁧󠁨󠁷󠁰󠁿 Flag for Western (GH-WP) 🏴󠁧󠁲󠁣󠁿 Flag for West Macedonia (GR-C) 🏴󠁧󠁱󠁣󠁿 Flag for Río Muni (GQ-C) 🏴󠁧󠁭󠁬󠁿 Flag for Lower River Division (GM-L) 🏴󠁧󠁨󠁵󠁥󠁿 Flag for Upper East (GH-UE) 🏴󠁧󠁮󠁣󠁿 Flag for Conakry (GN-C) 🏴󠁧󠁲󠁢󠁿 Flag for Central Macedonia (GR-B) 🏴󠁧󠁭󠁭󠁿 Flag for Central River Division (GM-M) 🏴󠁧󠁨󠁵󠁷󠁿 Flag for Upper West (GH-UW) 🏴󠁧󠁬󠁫󠁵󠁿 Flag for Kujalleq (GL-KU) 🏴󠁧󠁮󠁢󠁿 Flag for Boké Region (GN-B) 🏴󠁧󠁬󠁱󠁥󠁿 Flag for Qeqqata (GL-QE) 🏴󠁧󠁲󠁤󠁿 Flag for Epirus (GR-D) 🏴󠁧󠁨󠁡󠁨󠁿 Flag for Ashanti (GH-AH) 🏴󠁧󠁨󠁴󠁶󠁿 Flag for Volta (GH-TV) 🏴󠁧󠁲󠀶󠀹󠁿 Flag for Mount Athos (GR-69) 🏴󠁧󠁱󠁩󠁿 Flag for Insular (GQ-I) 🏴󠁧󠁭󠁷󠁿 Flag for West Coast Division (GM-W) 🏴󠁧󠁭󠁢󠁿 Flag for Banjul (GM-B) 🏴󠁧󠁮󠁬󠁿 Flag for Labé Region (GN-L) 🏴󠁧󠁲󠁥󠁿 Flag for Thessaly (GR-E) 🏴󠁧󠁮󠁦󠁿 Flag for Faranah Region (GN-F) 🏴󠁧󠁹󠁣󠁵󠁿 Flag for Cuyuni-Mazaruni (GY-CU) 🏴󠁨󠁮󠁡󠁴󠁿 Flag for Atlántida (HN-AT) 👨🏾‍👶🏾‍👶🏾 Family - Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁧󠁴󠁨󠁵󠁿 Flag for Huehuetenango (GT-HU) 🏴󠁧󠁴󠁡󠁶󠁿 Flag for Alta Verapaz (GT-AV) 🏴󠁧󠁴󠁰󠁲󠁿 Flag for El Progreso (GT-PR) 🏴󠁧󠁷󠁮󠁿 Flag for Norte (GW-N) 🏴󠁧󠁴󠁳󠁵󠁿 Flag for Suchitepéquez (GT-SU) 🏴󠁧󠁹󠁰󠁭󠁿 Flag for Pomeroon-Supenaam (GY-PM) 🏴󠁧󠁴󠁩󠁺󠁿 Flag for Izabal (GT-IZ) 🏴󠁧󠁹󠁰󠁴󠁿 Flag for Potaro-Siparuni (GY-PT) 🏴󠁧󠁴󠁱󠁺󠁿 Flag for Quetzaltenango (GT-QZ) 🏴󠁧󠁴󠁣󠁭󠁿 Flag for Chimaltenango (GT-CM) 🏴󠁥󠁴󠁡󠁡󠁿 Flag for Addis Ababa (ET-AA) 🏴󠁧󠁷󠁢󠁳󠁿 Flag for Bissau (GW-BS) 🏴󠁧󠁴󠁱󠁣󠁿 Flag for Quiché (GT-QC) 🏴󠁧󠁴󠁴󠁯󠁿 Flag for Totonicapán (GT-TO) 🏴󠁧󠁹󠁢󠁡󠁿 Flag for Barima-Waini (GY-BA) 🏴󠁧󠁹󠁥󠁳󠁿 Flag for Essequibo Islands-West Demerara (GY-ES) 👨🏿‍👶🏿‍👶🏿 Family - Man: Dark Skin Tone, Baby: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁨󠁮󠁣󠁨󠁿 Flag for Choluteca (HN-CH) 🏴󠁧󠁹󠁤󠁥󠁿 Flag for Demerara-Mahaica (GY-DE) 👨🏻‍👨🏻‍👦🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone 🏴󠁧󠁴󠁳󠁡󠁿 Flag for Sacatepéquez (GT-SA) 🏴󠁧󠁴󠁪󠁵󠁿 Flag for Jutiapa (GT-JU) 🏴󠁧󠁴󠁣󠁱󠁿 Flag for 
Chiquimula (GT-CQ) 🏴󠁧󠁴󠁢󠁶󠁿 Flag for Baja Verapaz (GT-BV) 🏴󠁧󠁴󠁥󠁳󠁿 Flag for Escuintla (GT-ES) 🏴󠁧󠁴󠁺󠁡󠁿 Flag for Zacapa (GT-ZA) 🏴󠁧󠁷󠁳󠁿 Flag for Sul (GW-S) 🏴󠁧󠁷󠁬󠁿 Flag for Leste (GW-L) 🏴󠁧󠁴󠁪󠁡󠁿 Flag for Jalapa (GT-JA) 🏴󠁧󠁴󠁰󠁥󠁿 Flag for Petén (GT-PE) 🏴󠁧󠁴󠁳󠁯󠁿 Flag for Sololá (GT-SO) 🏴󠁨󠁮󠁣󠁭󠁿 Flag for Comayagua (HN-CM) 🏴󠁨󠁲󠀰󠀶󠁿 Flag for Koprivnica-Križevci (HR-06) 🏴󠁨󠁮󠁣󠁰󠁿 Flag for Copán (HN-CP) 🏴󠁨󠁮󠁩󠁢󠁿 Flag for Bay Islands (HN-IB) 🏴󠁨󠁲󠀰󠀹󠁿 Flag for Lika-Senj (HR-09) 🏴󠁨󠁮󠁳󠁢󠁿 Flag for Santa Bárbara (HN-SB) 🏴󠁨󠁮󠁩󠁮󠁿 Flag for Intibucá (HN-IN) 🏴󠁨󠁮󠁦󠁭󠁿 Flag for Francisco Morazán (HN-FM) 🏴󠁨󠁲󠀰󠀱󠁿 Flag for Zagreb County (HR-01) 🏴󠁨󠁮󠁣󠁬󠁿 Flag for Colón (HN-CL) 🏴󠁨󠁴󠁣󠁥󠁿 Flag for Centre (HT-CE) 🏴󠁨󠁲󠀰󠀸󠁿 Flag for Primorje-Gorski Kotar (HR-08) 🏴󠁨󠁮󠁬󠁥󠁿 Flag for Lempira (HN-LE) 🏴󠁨󠁲󠀱󠀴󠁿 Flag for Osijek-Baranja (HR-14) 🏴󠁨󠁲󠀱󠀲󠁿 Flag for Brod-Posavina (HR-12) 🏴󠁨󠁲󠀱󠀷󠁿 Flag for Split-Dalmatia (HR-17) 🏴󠁨󠁮󠁯󠁬󠁿 Flag for Olancho (HN-OL) 🏴󠁨󠁮󠁬󠁰󠁿 Flag for La Paz (HN-LP) 🏴󠁨󠁲󠀲󠀰󠁿 Flag for Međimurje (HR-20) 🏴󠁨󠁮󠁥󠁰󠁿 Flag for El Paraíso (HN-EP) 🏴󠁨󠁲󠀲󠀱󠁿 Flag for Zagreb (HR-21) 🏴󠁨󠁲󠀱󠀵󠁿 Flag for Šibenik-Knin (HR-15) 🏴󠁥󠁥󠀴󠀴󠁿 Flag for Ida-Viru (EE-44) 🏴󠁨󠁮󠁣󠁲󠁿 Flag for Cortés (HN-CR) 🏴󠁨󠁲󠀰󠀳󠁿 Flag for Sisak-Moslavina (HR-03) 🏴󠁨󠁲󠀱󠀳󠁿 Flag for Zadar (HR-13) 🏴󠁨󠁲󠀱󠀸󠁿 Flag for Istria (HR-18) 🏴󠁨󠁲󠀰󠀲󠁿 Flag for Krapina-Zagorje (HR-02) 🏴󠁨󠁲󠀱󠀶󠁿 Flag for Vukovar-Syrmia (HR-16) 🏴󠁨󠁮󠁹󠁯󠁿 Flag for Yoro (HN-YO) 🏴󠁨󠁴󠁡󠁲󠁿 Flag for Artibonite (HT-AR) 🏴󠁨󠁮󠁧󠁤󠁿 Flag for Gracias a Dios (HN-GD) 🏴󠁨󠁮󠁶󠁡󠁿 Flag for Valle (HN-VA) 🏴󠁤󠁺󠀱󠀸󠁿 Flag for Jijel (DZ-18) 🏴󠁨󠁲󠀱󠀹󠁿 Flag for Dubrovnik-Neretva (HR-19) 🏴󠁨󠁲󠀱󠀱󠁿 Flag for Požega-Slavonia (HR-11) 🏴󠁨󠁲󠀰󠀷󠁿 Flag for Bjelovar-Bilogora (HR-07) 🏴󠁨󠁮󠁯󠁣󠁿 Flag for Ocotepeque (HN-OC) 🏴󠁨󠁵󠁢󠁵󠁿 Flag for Budapest (HU-BU) 🏴󠁨󠁵󠁨󠁶󠁿 Flag for Hódmezővásárhely (HU-HV) 🏴󠁨󠁵󠁦󠁥󠁿 Flag for Fejér (HU-FE) 🏴󠁨󠁵󠁢󠁡󠁿 Flag for Baranya (HU-BA) 🏴󠁨󠁵󠁳󠁦󠁿 Flag for Székesfehérvár (HU-SF) 🏴󠁨󠁵󠁢󠁺󠁿 Flag for Borsod-Abaúj-Zemplén (HU-BZ) 🏴󠁨󠁵󠁣󠁳󠁿 Flag for Csongrád (HU-CS) 🏴󠁨󠁵󠁳󠁮󠁿 Flag for Sopron (HU-SN) 🏴󠁨󠁵󠁤󠁵󠁿 Flag for Dunaújváros (HU-DU) 🏴󠁨󠁵󠁫󠁶󠁿 Flag for Kaposvár (HU-KV) 🏴󠁨󠁵󠁮󠁹󠁿 Flag for Nyíregyháza 
(HU-NY) 🏴󠁨󠁵󠁨󠁢󠁿 Flag for Hajdú-Bihar (HU-HB) 🏴󠁨󠁴󠁯󠁵󠁿 Flag for Ouest (HT-OU) 🏴󠁨󠁵󠁳󠁤󠁿 Flag for Szeged (HU-SD) 🏴󠁨󠁵󠁰󠁥󠁿 Flag for Pest (HU-PE) 🏴󠁨󠁵󠁫󠁥󠁿 Flag for Komárom-Esztergom (HU-KE) 🏴󠁨󠁵󠁮󠁫󠁿 Flag for Nagykanizsa (HU-NK) 🏴󠁨󠁴󠁧󠁡󠁿 Flag for Grand’Anse (HT-GA) 🏴󠁨󠁵󠁢󠁣󠁿 Flag for Békéscsaba (HU-BC) 🏴󠁨󠁴󠁳󠁤󠁿 Flag for Sud (HT-SD) 🏴󠁨󠁴󠁮󠁯󠁿 Flag for Nord-Ouest (HT-NO) 🏴󠁨󠁵󠁨󠁥󠁿 Flag for Heves (HU-HE) 🏴󠁨󠁵󠁢󠁫󠁿 Flag for Bács-Kiskun (HU-BK) 🏴󠁨󠁵󠁭󠁩󠁿 Flag for Miskolc (HU-MI) 🏴󠁨󠁵󠁥󠁲󠁿 Flag for Érd (HU-ER) 👨🏽‍👨🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁨󠁴󠁮󠁩󠁿 Flag for Nippes (HT-NI) 🏴󠁨󠁵󠁳󠁫󠁿 Flag for Szolnok (HU-SK) 🏴󠁨󠁴󠁮󠁤󠁿 Flag for Nord (HT-ND) 🏴󠁨󠁴󠁳󠁥󠁿 Flag for Sud-Est (HT-SE) 🏴󠁨󠁵󠁪󠁮󠁿 Flag for Jász-Nagykun-Szolnok (HU-JN) 🏴󠁨󠁵󠁰󠁳󠁿 Flag for Pécs (HU-PS) 🏴󠁨󠁵󠁫󠁭󠁿 Flag for Kecskemét (HU-KM) 🏴󠁨󠁵󠁤󠁥󠁿 Flag for Debrecen (HU-DE) 🏴󠁨󠁵󠁢󠁥󠁿 Flag for Békés (HU-BE) 🏴󠁨󠁵󠁮󠁯󠁿 Flag for Nógrád (HU-NO) 🏴󠁨󠁵󠁳󠁨󠁿 Flag for Szombathely (HU-SH) 🏴󠁨󠁵󠁧󠁹󠁿 Flag for Győr (HU-GY) 🏴󠁩󠁤󠁮󠁵󠁿 Flag for Lesser Sunda Islands (ID-NU) 🏴󠁨󠁵󠁴󠁢󠁿 Flag for Tatabánya (HU-TB) 🏴󠁩󠁤󠁪󠁷󠁿 Flag for Java (ID-JW) 🏴󠁩󠁮󠁣󠁨󠁿 Flag for Chandigarh (IN-CH) 🏴󠁩󠁮󠁧󠁪󠁿 Flag for Gujarat (IN-GJ) 🏴󠁩󠁥󠁬󠁿 Flag for Leinster (IE-L) 🏴󠁨󠁵󠁺󠁡󠁿 Flag for Zala (HU-ZA) 🏴󠁩󠁮󠁤󠁤󠁿 Flag for Daman and Diu (IN-DD) 🏴󠁩󠁬󠁴󠁡󠁿 Flag for Tel Aviv District (IL-TA) 🏴󠁩󠁤󠁳󠁬󠁿 Flag for Sulawesi (ID-SL) 🏴󠁩󠁮󠁡󠁲󠁿 Flag for Arunachal Pradesh (IN-AR) 🏴󠁨󠁵󠁶󠁥󠁿 Flag for Veszprém County (HU-VE) 🏴󠁩󠁮󠁡󠁮󠁿 Flag for Andaman and Nicobar Islands (IN-AN) 🏴󠁨󠁵󠁳󠁯󠁿 Flag for Somogy (HU-SO) 🏴󠁨󠁵󠁶󠁡󠁿 Flag for Vas (HU-VA) 🏴󠁩󠁬󠁪󠁭󠁿 Flag for Jerusalem (IL-JM) 🏴󠁩󠁮󠁤󠁮󠁿 Flag for Dadra and Nagar Haveli (IN-DN) 🏴󠁨󠁵󠁶󠁭󠁿 Flag for Veszprém (HU-VM) 🏴󠁨󠁵󠁳󠁴󠁿 Flag for Salgótarján (HU-ST) 🏴󠁩󠁮󠁣󠁴󠁿 Flag for Chhattisgarh (IN-CT) 🏴󠁩󠁥󠁵󠁿 Flag for Ulster (IE-U) 🏴󠁩󠁮󠁤󠁬󠁿 Flag for Delhi (IN-DL) 🏴󠁩󠁥󠁭󠁿 Flag for Munster (IE-M) 🏴󠁩󠁥󠁣󠁿 Flag for Connacht (IE-C) 🏴󠁩󠁬󠁨󠁡󠁿 Flag for Haifa District (IL-HA) 🏴󠁩󠁤󠁫󠁡󠁿 Flag for Kalimantan (ID-KA) 🏴󠁩󠁮󠁧󠁡󠁿 Flag for Goa (IN-GA) 🏴󠁩󠁤󠁳󠁭󠁿 Flag for Sumatra (ID-SM) 🏴󠁩󠁤󠁰󠁰󠁿 Flag for Papua Islands (ID-PP) 🏴󠁨󠁵󠁳󠁳󠁿 Flag for Szekszárd 
(HU-SS) 🏴󠁩󠁬󠁺󠁿 Flag for Northern District (IL-Z) 🏴󠁨󠁵󠁴󠁯󠁿 Flag for Tolna (HU-TO) 🏴󠁩󠁬󠁭󠁿 Flag for Central District (IL-M) 🏴󠁩󠁬󠁤󠁿 Flag for Southern District (IL-D) 🏴󠁩󠁮󠁢󠁲󠁿 Flag for Bihar (IN-BR) 🏴󠁨󠁵󠁺󠁥󠁿 Flag for Zalaegerszeg (HU-ZE) 🏴󠁩󠁮󠁡󠁰󠁿 Flag for Andhra Pradesh (IN-AP) 🏴󠁩󠁱󠁤󠁡󠁿 Flag for Dohuk (IQ-DA) 🏴󠁩󠁮󠁪󠁨󠁿 Flag for Jharkhand (IN-JH) 🏴󠁩󠁮󠁫󠁬󠁿 Flag for Kerala (IN-KL) 🏴󠁩󠁮󠁷󠁢󠁿 Flag for West Bengal (IN-WB) 🏴󠁩󠁮󠁯󠁲󠁿 Flag for Odisha (IN-OR) 🏴󠁩󠁮󠁰󠁹󠁿 Flag for Puducherry (IN-PY) 🏴󠁩󠁱󠁫󠁡󠁿 Flag for Karbala (IQ-KA) 🏴󠁩󠁱󠁳󠁤󠁿 Flag for Saladin (IQ-SD) 🏴󠁩󠁮󠁭󠁺󠁿 Flag for Mizoram (IN-MZ) 🏴󠁩󠁮󠁨󠁰󠁿 Flag for Himachal Pradesh (IN-HP) 🏴󠁩󠁮󠁭󠁰󠁿 Flag for Madhya Pradesh (IN-MP) 🏴󠁩󠁮󠁰󠁢󠁿 Flag for Punjab (IN-PB) 🏴󠁩󠁮󠁮󠁬󠁿 Flag for Nagaland (IN-NL) 🏴󠁩󠁱󠁱󠁡󠁿 Flag for Al-Qādisiyyah (IQ-QA) 🏴󠁩󠁱󠁤󠁩󠁿 Flag for Diyala (IQ-DI) 🏴󠁩󠁱󠁮󠁩󠁿 Flag for Nineveh (IQ-NI) 🏴󠁩󠁱󠁤󠁱󠁿 Flag for Dhi Qar (IQ-DQ) 🏴󠁩󠁮󠁭󠁬󠁿 Flag for Meghalaya (IN-ML) 🏴󠁩󠁮󠁴󠁮󠁿 Flag for Tamil Nadu (IN-TN) 🏴󠁩󠁱󠁮󠁡󠁿 Flag for Najaf (IQ-NA) 🏴󠁩󠁱󠁭󠁵󠁿 Flag for Al Muthanna (IQ-MU) 🏴󠁩󠁮󠁴󠁧󠁿 Flag for Telangana (IN-TG) 🏴󠁩󠁮󠁨󠁲󠁿 Flag for Haryana (IN-HR) 🏴󠁩󠁮󠁵󠁴󠁿 Flag for Uttarakhand (IN-UT) 🏴󠁩󠁮󠁴󠁲󠁿 Flag for Tripura (IN-TR) 🏴󠁩󠁱󠁢󠁧󠁿 Flag for Baghdad (IQ-BG) 🏴󠁩󠁮󠁬󠁤󠁿 Flag for Lakshadweep (IN-LD) 🏴󠁩󠁱󠁭󠁡󠁿 Flag for Maysan (IQ-MA) 🏴󠁩󠁱󠁢󠁡󠁿 Flag for Basra (IQ-BA) 🏴󠁩󠁱󠁡󠁲󠁿 Flag for Erbil (IQ-AR) 🏴󠁩󠁮󠁭󠁨󠁿 Flag for Maharashtra (IN-MH) 🏴󠁩󠁱󠁡󠁮󠁿 Flag for Al Anbar (IQ-AN) 🏴󠁩󠁮󠁳󠁫󠁿 Flag for Sikkim (IN-SK) 🏴󠁩󠁱󠁢󠁢󠁿 Flag for Babylon (IQ-BB) 🏴󠁩󠁮󠁵󠁰󠁿 Flag for Uttar Pradesh (IN-UP) 🏴󠁩󠁱󠁳󠁵󠁿 Flag for Sulaymaniyah (IQ-SU) 🏴󠁩󠁮󠁲󠁪󠁿 Flag for Rajasthan (IN-RJ) 🏴󠁩󠁮󠁪󠁫󠁿 Flag for Jammu and Kashmir (IN-JK) 🏴󠁩󠁲󠀰󠀸󠁿 Flag for Chaharmahal and Bakhtiari (IR-08) 🏴󠁩󠁲󠀲󠀶󠁿 Flag for Qom (IR-26) 🏴󠁩󠁳󠀱󠁿 Flag for Capital (IS-1) 👨🏾‍👨🏾‍👦🏾 Family - Man: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁩󠁲󠀰󠀳󠁿 Flag for Ardabil (IR-03) 🏴󠁩󠁲󠀲󠀵󠁿 Flag for Yazd (IR-25) 🏴󠁩󠁲󠀲󠀹󠁿 Flag for South Khorasan (IR-29) 👨🏿‍👨🏿‍👦🏿 Family - Man: Dark Skin Tone, Man: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁩󠁲󠀲󠀴󠁿 Flag for Hamadan (IR-24) 🏴󠁧󠁹󠁭󠁡󠁿 Flag for Mahaica-Berbice 
(GY-MA) 🏴󠁩󠁳󠀳󠁿 Flag for Western (IS-3) 🏴󠁩󠁲󠀲󠀷󠁿 Flag for Golestan (IR-27) 🏴󠁩󠁲󠀱󠀱󠁿 Flag for Zanjan (IR-11) 🏴󠁩󠁲󠀲󠀰󠁿 Flag for Lorestan (IR-20) 🏴󠁩󠁲󠀱󠀷󠁿 Flag for Kermanshah (IR-17) 🏴󠁩󠁲󠀱󠀸󠁿 Flag for Kohgiluyeh and Boyer-Ahmad (IR-18) 🏴󠁥󠁧󠁣󠁿 Flag for Cairo (EG-C) 🏴󠁩󠁲󠀳󠀱󠁿 Flag for North Khorasan (IR-31) 🏴󠁩󠁲󠀰󠀶󠁿 Flag for Bushehr (IR-06) 🏴󠁥󠁳󠁥󠁸󠁿 Flag for Extremadura (ES-EX) 🏴󠁥󠁳󠁣󠁮󠁿 Flag for Canary Islands (ES-CN) 🏴󠁩󠁳󠀷󠁿 Flag for Eastern (IS-7) 🏴󠁩󠁲󠀰󠀵󠁿 Flag for Ilam (IR-05) 🏴󠁩󠁲󠀲󠀸󠁿 Flag for Qazvin (IR-28) 🏴󠁩󠁲󠀰󠀴󠁿 Flag for Isfahan (IR-04) 🏴󠁩󠁲󠀱󠀵󠁿 Flag for Kerman (IR-15) 🏴󠁩󠁲󠀲󠀳󠁿 Flag for Hormozgan (IR-23) 🏴󠁩󠁱󠁷󠁡󠁿 Flag for Wasit (IQ-WA) 🏴󠁩󠁴󠀲󠀱󠁿 Flag for Piedmont (IT-21) 🏴󠁩󠁳󠀶󠁿 Flag for Northeastern (IS-6) 🏴󠁩󠁳󠀵󠁿 Flag for Northwestern (IS-5) 🏴󠁩󠁲󠀲󠀲󠁿 Flag for Markazi (IR-22) 🏴󠁩󠁲󠀱󠀹󠁿 Flag for Gilan (IR-19) 🏴󠁩󠁲󠀱󠀰󠁿 Flag for Khuzestan (IR-10) 🏴󠁩󠁲󠀱󠀲󠁿 Flag for Semnan (IR-12) 🏴󠁩󠁳󠀲󠁿 Flag for Southern Peninsula (IS-2) 🏴󠁪󠁭󠀱󠀲󠁿 Flag for Manchester (JM-12) 🏴󠁪󠁯󠁩󠁲󠁿 Flag for Irbid (JO-IR) 🏴󠁪󠁭󠀰󠀵󠁿 Flag for Saint Mary (JM-05) 🏴󠁩󠁴󠀷󠀷󠁿 Flag for Basilicata (IT-77) 🏴󠁩󠁴󠀳󠀶󠁿 Flag for Friuli–Venezia Giulia (IT-36) 🏴󠁪󠁭󠀱󠀳󠁿 Flag for Clarendon (JM-13) 🏴󠁩󠁴󠀵󠀷󠁿 Flag for Marche (IT-57) 🏴󠁪󠁭󠀰󠀴󠁿 Flag for Portland (JM-04) 🏴󠁩󠁴󠀸󠀲󠁿 Flag for Sicily (IT-82) 🏴󠁩󠁴󠀳󠀴󠁿 Flag for Veneto (IT-34) 🏴󠁩󠁴󠀶󠀵󠁿 Flag for Abruzzo (IT-65) 🏴󠁩󠁴󠀶󠀷󠁿 Flag for Molise (IT-67) 🏴󠁪󠁯󠁢󠁡󠁿 Flag for Balqa (JO-BA) 🏴󠁩󠁴󠀷󠀵󠁿 Flag for Apulia (IT-75) 🏴󠁩󠁴󠀷󠀸󠁿 Flag for Calabria (IT-78) 🏴󠁩󠁴󠀵󠀲󠁿 Flag for Tuscany (IT-52) 🏴󠁪󠁭󠀰󠀹󠁿 Flag for Hanover (JM-09) 🏴󠁪󠁭󠀰󠀲󠁿 Flag for Saint Andrew (JM-02) 🏴󠁪󠁯󠁡󠁴󠁿 Flag for Tafilah (JO-AT) 🏴󠁩󠁴󠀵󠀵󠁿 Flag for Umbria (IT-55) 🏴󠁪󠁭󠀰󠀸󠁿 Flag for Saint James (JM-08) 🏴󠁪󠁭󠀰󠀶󠁿 Flag for Saint Ann (JM-06) 🏴󠁪󠁭󠀱󠀱󠁿 Flag for Saint Elizabeth (JM-11) 🏴󠁪󠁯󠁡󠁺󠁿 Flag for Zarqa (JO-AZ) 🏴󠁦󠁩󠀱󠀲󠁿 Flag for Ostrobothnia (FI-12) 🏴󠁩󠁴󠀶󠀲󠁿 Flag for Lazio (IT-62) 🏴󠁪󠁯󠁡󠁪󠁿 Flag for Ajloun (JO-AJ) 🏴󠁩󠁴󠀴󠀲󠁿 Flag for Liguria (IT-42) 🏴󠁪󠁭󠀰󠀷󠁿 Flag for Trelawny (JM-07) 🏴󠁪󠁯󠁡󠁱󠁿 Flag for Aqaba (JO-AQ) 🏴󠁪󠁯󠁪󠁡󠁿 Flag for Jerash (JO-JA) 🏴󠁪󠁯󠁡󠁭󠁿 Flag for Amman (JO-AM) 🏴󠁩󠁴󠀲󠀳󠁿 Flag for Aosta Valley (IT-23) 🏴󠁪󠁭󠀱󠀰󠁿 Flag for 
Westmoreland (JM-10) 🏴󠁪󠁰󠀰󠀸󠁿 Flag for Ibaraki (JP-08) 🏴󠁪󠁯󠁭󠁤󠁿 Flag for Madaba (JO-MD) 🏴󠁪󠁰󠀳󠀲󠁿 Flag for Shimane (JP-32) 🏴󠁪󠁰󠀲󠀶󠁿 Flag for Kyōto (JP-26) 🏴󠁣󠁬󠁡󠁲󠁿 Flag for Araucanía (CL-AR) 🏴󠁪󠁰󠀰󠀹󠁿 Flag for Tochigi (JP-09) 🏴󠁪󠁰󠀰󠀵󠁿 Flag for Akita (JP-05) 🏴󠁪󠁰󠀱󠀲󠁿 Flag for Chiba (JP-12) 🏴󠁪󠁰󠀰󠀴󠁿 Flag for Miyagi (JP-04) 🏴󠁪󠁰󠀱󠀵󠁿 Flag for Niigata (JP-15) 🏴󠁪󠁰󠀱󠀶󠁿 Flag for Toyama (JP-16) 🏴󠁪󠁰󠀲󠀳󠁿 Flag for Aichi (JP-23) 🏴󠁪󠁰󠀳󠀶󠁿 Flag for Tokushima (JP-36) 🏴󠁪󠁰󠀲󠀰󠁿 Flag for Nagano (JP-20) 🏴󠁪󠁰󠀳󠀱󠁿 Flag for Tottori (JP-31) 🏴󠁪󠁰󠀰󠀳󠁿 Flag for Iwate (JP-03) 🏴󠁪󠁰󠀳󠀳󠁿 Flag for Okayama (JP-33) 🏴󠁪󠁰󠀱󠀷󠁿 Flag for Ishikawa (JP-17) 🏴󠁪󠁰󠀳󠀰󠁿 Flag for Wakayama (JP-30) 🏴󠁪󠁰󠀱󠀰󠁿 Flag for Gunma (JP-10) 🏴󠁪󠁯󠁭󠁡󠁿 Flag for Mafraq (JO-MA) 🏴󠁪󠁰󠀳󠀵󠁿 Flag for Yamaguchi (JP-35) 🏴󠁣󠁵󠀱󠀲󠁿 Flag for Granma (CU-12) 🏴󠁪󠁰󠀲󠀵󠁿 Flag for Shiga (JP-25) 🏴󠁪󠁰󠀰󠀲󠁿 Flag for Aomori (JP-02) 🏴󠁪󠁰󠀱󠀱󠁿 Flag for Saitama (JP-11) 🏴󠁪󠁰󠀲󠀹󠁿 Flag for Nara (JP-29) 🏴󠁪󠁰󠀱󠀹󠁿 Flag for Yamanashi (JP-19) 🏴󠁪󠁰󠀳󠀴󠁿 Flag for Hiroshima (JP-34) 🏴󠁪󠁯󠁭󠁮󠁿 Flag for Ma’an (JO-MN) 🏴󠁪󠁰󠀲󠀲󠁿 Flag for Shizuoka (JP-22) 🏴󠁪󠁰󠀲󠀷󠁿 Flag for Ōsaka (JP-27) 🏴󠁪󠁰󠀲󠀴󠁿 Flag for Mie (JP-24) 🏴󠁪󠁰󠀰󠀶󠁿 Flag for Yamagata (JP-06) 🏴󠁪󠁰󠀲󠀸󠁿 Flag for Hyōgo (JP-28) 🏴󠁪󠁯󠁫󠁡󠁿 Flag for Karak (JO-KA) 🏴󠁪󠁰󠀳󠀸󠁿 Flag for Ehime (JP-38) 🏴󠁪󠁰󠀱󠀴󠁿 Flag for Kanagawa (JP-14) 🏴󠁪󠁰󠀳󠀷󠁿 Flag for Kagawa (JP-37) 🏴󠁫󠁥󠀰󠀷󠁿 Flag for Garissa (KE-07) 🏴󠁫󠁥󠀲󠀴󠁿 Flag for Mandera (KE-24) 🏴󠁪󠁰󠀴󠀶󠁿 Flag for Kagoshima (JP-46) 🏴󠁫󠁥󠀱󠀷󠁿 Flag for Kisumu (KE-17) 🏴󠁫󠁥󠀱󠀴󠁿 Flag for Kilifi (KE-14) 🏴󠁫󠁥󠀱󠀵󠁿 Flag for Kirinyaga (KE-15) 🏴󠁫󠁥󠀱󠀰󠁿 Flag for Kajiado (KE-10) 🏴󠁫󠁥󠀰󠀳󠁿 Flag for Bungoma (KE-03) 🏴󠁫󠁥󠀳󠀲󠁿 Flag for Nandi (KE-32) 🏴󠁫󠁥󠀱󠀳󠁿 Flag for Kiambu (KE-13) 🏴󠁫󠁥󠀲󠀰󠁿 Flag for Laikipia (KE-20) 🏴󠁫󠁥󠀲󠀱󠁿 Flag for Lamu (KE-21) 🏴󠁪󠁰󠀴󠀰󠁿 Flag for Fukuoka (JP-40) 🏴󠁫󠁥󠀰󠀴󠁿 Flag for Busia (KE-04) 🏴󠁪󠁰󠀴󠀱󠁿 Flag for Saga (JP-41) 🏴󠁫󠁥󠀲󠀷󠁿 Flag for Migori (KE-27) 🏴󠁫󠁥󠀰󠀶󠁿 Flag for Embu (KE-06) 👩🏾‍👦🏾‍👧🏾 Family - Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone, Girl: Medium-Dark Skin Tone 🏴󠁫󠁥󠀱󠀲󠁿 Flag for Kericho (KE-12) 🏴󠁫󠁥󠀰󠀹󠁿 Flag for Isiolo (KE-09) 🏴󠁫󠁥󠀱󠀹󠁿 Flag for Kwale (KE-19) 🏴󠁪󠁰󠀴󠀲󠁿 Flag for Nagasaki 
(JP-42) 🏴󠁫󠁥󠀳󠀰󠁿 Flag for Nairobi County (KE-30) 🏴󠁫󠁥󠀲󠀳󠁿 Flag for Makueni (KE-23) 🏴󠁫󠁥󠀲󠀹󠁿 Flag for Murang’a (KE-29) 🏴󠁪󠁰󠀳󠀹󠁿 Flag for Kōchi (JP-39) 🏴󠁫󠁥󠀰󠀲󠁿 Flag for Bomet (KE-02) 🏴󠁫󠁥󠀲󠀸󠁿 Flag for Mombasa (KE-28) 🏴󠁫󠁥󠀰󠀸󠁿 Flag for Homa Bay (KE-08) 🏴󠁫󠁥󠀱󠀱󠁿 Flag for Kakamega (KE-11) 🏴󠁫󠁥󠀲󠀲󠁿 Flag for Machakos (KE-22) 🏴󠁫󠁥󠀱󠀶󠁿 Flag for Kisii (KE-16) 🏴󠁫󠁥󠀰󠀵󠁿 Flag for Elgeyo-Marakwet (KE-05) 🏴󠁪󠁰󠀴󠀴󠁿 Flag for Ōita (JP-44) 🏴󠁫󠁥󠀳󠀳󠁿 Flag for Narok (KE-33) 🏴󠁫󠁥󠀲󠀶󠁿 Flag for Meru (KE-26) 🏴󠁪󠁰󠀴󠀳󠁿 Flag for Kumamoto (JP-43) 🏴󠁪󠁰󠀴󠀵󠁿 Flag for Miyazaki (JP-45) 🏴󠁫󠁨󠀱󠀹󠁿 Flag for Stung Treng (KH-19) 🏴󠁫󠁥󠀳󠀷󠁿 Flag for Samburu (KE-37) 🏴󠁫󠁥󠀴󠀷󠁿 Flag for West Pokot (KE-47) 🏴󠁫󠁥󠀳󠀹󠁿 Flag for Taita-Taveta (KE-39) 🏴󠁫󠁨󠀱󠀴󠁿 Flag for Prey Veng (KH-14) 🏴󠁫󠁥󠀴󠀱󠁿 Flag for Tharaka-Nithi (KE-41) 🏴󠁫󠁧󠁯󠁿 Flag for Osh Region (KG-O) 🏴󠁫󠁨󠀲󠀵󠁿 Flag for Tbong Khmum (KH-25) 🏴󠁫󠁧󠁴󠁿 Flag for Talas (KG-T) 🏴󠁫󠁨󠀱󠀲󠁿 Flag for Phnom Penh (KH-12) 🏴󠁫󠁧󠁧󠁢󠁿 Flag for Bishkek (KG-GB) 🏴󠁫󠁥󠀴󠀴󠁿 Flag for Uasin Gishu (KE-44) 🏴󠁫󠁨󠀲󠀳󠁿 Flag for Kep (KH-23) 🏴󠁫󠁨󠀱󠀰󠁿 Flag for Kratié (KH-10) 🏴󠁫󠁨󠀲󠀱󠁿 Flag for Takéo (KH-21) 🏴󠁫󠁨󠀲󠁿 Flag for Battambang (KH-2) 🏴󠁫󠁥󠀳󠀶󠁿 Flag for Nyeri (KE-36) 🏴󠁫󠁨󠀱󠀳󠁿 Flag for Preah Vihear (KH-13) 🏴󠁫󠁥󠀴󠀰󠁿 Flag for Tana River (KE-40) 🏴󠁫󠁨󠀲󠀴󠁿 Flag for Pailin (KH-24) 🏴󠁫󠁨󠀱󠀶󠁿 Flag for Ratanakiri (KH-16) 🏴󠁫󠁨󠀲󠀲󠁿 Flag for Oddar Meanchey (KH-22) 🏴󠁫󠁥󠀴󠀲󠁿 Flag for Trans Nzoia (KE-42) 🏴󠁫󠁨󠀱󠀸󠁿 Flag for Sihanoukville (KH-18) 🏴󠁫󠁥󠀴󠀵󠁿 Flag for Vihiga (KE-45) 🏴󠁫󠁧󠁧󠁯󠁿 Flag for Osh (KG-GO) 🏴󠁫󠁧󠁢󠁿 Flag for Batken (KG-B) 🏴󠁫󠁧󠁪󠁿 Flag for Jalal-Abad (KG-J) 🏴󠁫󠁨󠀱󠀱󠁿 Flag for Mondulkiri (KH-11) 🏴󠁫󠁨󠀱󠀷󠁿 Flag for Siem Reap (KH-17) 🏴󠁫󠁥󠀴󠀳󠁿 Flag for Turkana (KE-43) 🏴󠁫󠁨󠀱󠁿 Flag for Banteay Meanchey (KH-1) 🏴󠁫󠁧󠁮󠁿 Flag for Naryn (KG-N) 🏴󠁫󠁥󠀳󠀵󠁿 Flag for Nyandarua (KE-35) 🏴󠁫󠁥󠀳󠀸󠁿 Flag for Siaya (KE-38) 🏴󠁫󠁥󠀳󠀴󠁿 Flag for Nyamira (KE-34) 🏴󠁫󠁨󠀱󠀵󠁿 Flag for Pursat (KH-15) 🏴󠁫󠁥󠀴󠀶󠁿 Flag for Wajir (KE-46) 🏴󠁫󠁧󠁹󠁿 Flag for Issyk-Kul (KG-Y) 🏴󠁫󠁧󠁣󠁿 Flag for Chuy (KG-C) 🏴󠁫󠁭󠁭󠁿 Flag for Mohéli (KM-M) 🏴󠁫󠁲󠀱󠀱󠁿 Flag for Seoul (KR-11) 🏴󠁫󠁨󠀴󠁿 Flag for Kampong Chhnang (KH-4) 🏴󠁫󠁲󠀳󠀰󠁿 Flag for Daejeon (KR-30) 🏴󠁫󠁰󠀰󠀵󠁿 Flag for South 
Hwanghae (KP-05) 🏴󠁫󠁨󠀷󠁿 Flag for Kampot (KH-7) 🏴󠁫󠁮󠁮󠁿 Flag for Nevis (KN-N) 🏴󠁫󠁰󠀰󠀴󠁿 Flag for Chagang (KP-04) 🏴󠁫󠁲󠀴󠀶󠁿 Flag for South Jeolla (KR-46) 🏴󠁫󠁰󠀰󠀶󠁿 Flag for North Hwanghae (KP-06) 🏴󠁫󠁮󠁫󠁿 Flag for Saint Kitts (KN-K) 🏴󠁫󠁨󠀵󠁿 Flag for Kampong Speu (KH-5) 🏴󠁫󠁲󠀴󠀵󠁿 Flag for North Jeolla (KR-45) 🏴󠁫󠁰󠀰󠀳󠁿 Flag for North Pyongan (KP-03) 🏴󠁫󠁨󠀹󠁿 Flag for Koh Kong (KH-9) 🏴󠁫󠁰󠀰󠀷󠁿 Flag for Kangwon (KP-07) 🏴󠁫󠁲󠀲󠀶󠁿 Flag for Busan (KR-26) 🏴󠁫󠁲󠀲󠀹󠁿 Flag for Gwangju City (KR-29) 🏴󠁫󠁨󠀳󠁿 Flag for Kampong Cham (KH-3) 🏴󠁫󠁲󠀴󠀳󠁿 Flag for North Chungcheong (KR-43) 🏴󠁫󠁨󠀸󠁿 Flag for Kandal (KH-8) 🏴󠁫󠁨󠀶󠁿 Flag for Kampong Thom (KH-6) 🏴󠁫󠁰󠀱󠀰󠁿 Flag for Ryanggang (KP-10) 🏴󠁫󠁰󠀰󠀲󠁿 Flag for South Pyongan (KP-02) 🏴󠁫󠁭󠁧󠁿 Flag for Grande Comore (KM-G) 🏴󠁫󠁰󠀰󠀸󠁿 Flag for South Hamgyong (KP-08) 🏴󠁫󠁰󠀱󠀳󠁿 Flag for Rason (KP-13) 🏴󠁫󠁲󠀲󠀷󠁿 Flag for Daegu (KR-27) 🏴󠁫󠁲󠀲󠀸󠁿 Flag for Incheon (KR-28) 🏴󠁫󠁲󠀴󠀲󠁿 Flag for Gangwon (KR-42) 🏴󠁫󠁰󠀰󠀱󠁿 Flag for Pyongyang (KP-01) 🏴󠁫󠁲󠀳󠀱󠁿 Flag for Ulsan (KR-31) 🏴󠁫󠁲󠀴󠀴󠁿 Flag for South Chungcheong (KR-44) 🏴󠁫󠁭󠁡󠁿 Flag for Anjouan (KM-A) 🏴󠁫󠁲󠀴󠀱󠁿 Flag for Gyeonggi (KR-41) 🏴󠁫󠁲󠀴󠀷󠁿 Flag for North Gyeongsang (KR-47) 🏴󠁫󠁰󠀰󠀹󠁿 Flag for North Hamgyong (KP-09) 🏴󠁬󠁡󠁨󠁯󠁿 Flag for Houaphanh (LA-HO) 🏴󠁫󠁺󠁢󠁡󠁹󠁿 Flag for Bayqongyr (KZ-BAY) 🏴󠁬󠁡󠁣󠁨󠁿 Flag for Champasak (LA-CH) 🏴󠁬󠁡󠁶󠁴󠁿 Flag for Vientiane (LA-VT) 🏴󠁫󠁷󠁨󠁡󠁿 Flag for Hawalli (KW-HA) 🏴󠁬󠁡󠁰󠁨󠁿 Flag for Phongsaly (LA-PH) 🏴󠁫󠁺󠁰󠁡󠁶󠁿 Flag for Pavlodar (KZ-PAV) 🏴󠁫󠁺󠁡󠁬󠁭󠁿 Flag for Almaty Region (KZ-ALM) 🏴󠁫󠁷󠁫󠁵󠁿 Flag for Al Asimah (KW-KU) 🏴󠁬󠁡󠁢󠁫󠁿 Flag for Bokeo (LA-BK) 🏴󠁬󠁡󠁡󠁴󠁿 Flag for Attapeu (LA-AT) 🏴󠁫󠁺󠁡󠁫󠁴󠁿 Flag for Aktobe (KZ-AKT) 🏴󠁫󠁺󠁡󠁴󠁹󠁿 Flag for Atyrau (KZ-ATY) 🏴󠁫󠁷󠁪󠁡󠁿 Flag for Al Jahra (KW-JA) 🏴󠁬󠁡󠁢󠁬󠁿 Flag for Bolikhamsai (LA-BL) 🏴󠁬󠁡󠁯󠁵󠁿 Flag for Oudomxay (LA-OU) 🏴󠁫󠁺󠁭󠁡󠁮󠁿 Flag for Mangystau (KZ-MAN) 🏴󠁫󠁺󠁺󠁡󠁰󠁿 Flag for West Kazakhstan (KZ-ZAP) 🏴󠁫󠁺󠁺󠁨󠁡󠁿 Flag for Jambyl (KZ-ZHA) 🏴󠁫󠁺󠁡󠁳󠁴󠁿 Flag for Astana (KZ-AST) 🏴󠁬󠁡󠁬󠁰󠁿 Flag for Luang Prabang (LA-LP) 🏴󠁫󠁷󠁦󠁡󠁿 Flag for Al Farwaniyah (KW-FA) 🏴󠁫󠁺󠁫󠁵󠁳󠁿 Flag for Kostanay (KZ-KUS) 🏴󠁫󠁺󠁡󠁬󠁡󠁿 Flag for Almaty (KZ-ALA) 🏴󠁫󠁺󠁫󠁡󠁲󠁿 Flag for Karagandy (KZ-KAR) 🏴󠁫󠁺󠁫󠁺󠁹󠁿 Flag 
for Kyzylorda (KZ-KZY) 🏴󠁬󠁡󠁳󠁬󠁿 Flag for Salavan (LA-SL) 🏴󠁬󠁡󠁬󠁭󠁿 Flag for Luang Namtha (LA-LM) 🏴󠁫󠁲󠀵󠀰󠁿 Flag for Sejong (KR-50) 🏴󠁫󠁷󠁭󠁵󠁿 Flag for Mubarak Al-Kabeer (KW-MU) 🏴󠁫󠁺󠁳󠁥󠁶󠁿 Flag for North Kazakhstan (KZ-SEV) 👩🏿‍👦🏿‍👧🏿 Family - Woman: Dark Skin Tone, Boy: Dark Skin Tone, Girl: Dark Skin Tone 🏴󠁫󠁷󠁡󠁨󠁿 Flag for Al Ahmadi (KW-AH) 🏴󠁬󠁡󠁫󠁨󠁿 Flag for Khammouane (LA-KH) 🏴󠁫󠁺󠁡󠁫󠁭󠁿 Flag for Akmola (KZ-AKM) 🏴󠁫󠁺󠁹󠁵󠁺󠁿 Flag for South Kazakhstan (KZ-YUZ) 🏴󠁬󠁩󠀰󠀹󠁿 Flag for Triesen (LI-09) 👨🏽‍👨🏽‍👦🏽‍👦🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Boy: Medium Skin Tone 👩🏻‍👦🏻‍👶🏻 Family - Woman: Light Skin Tone, Boy: Light Skin Tone, Baby: Light Skin Tone 🏴󠁬󠁫󠀷󠁿 Flag for North Central (LK-7) 🏴󠁬󠁡󠁸󠁡󠁿 Flag for Sainyabuli (LA-XA) 🏴󠁬󠁢󠁡󠁫󠁿 Flag for Akkar (LB-AK) 🏴󠁬󠁣󠀰󠀷󠁿 Flag for Laborie (LC-07) 🏴󠁬󠁣󠀰󠀶󠁿 Flag for Gros Islet (LC-06) 🏴󠁬󠁢󠁡󠁳󠁿 Flag for North (LB-AS) 🏴󠁬󠁩󠀰󠀱󠁿 Flag for Balzers (LI-01) 🏴󠁬󠁫󠀲󠁿 Flag for Central (LK-2) 🏴󠁬󠁩󠀰󠀴󠁿 Flag for Mauren (LI-04) 🏴󠁬󠁢󠁮󠁡󠁿 Flag for Nabatieh (LB-NA) 🏴󠁬󠁣󠀰󠀵󠁿 Flag for Dennery (LC-05) 🏴󠁬󠁢󠁪󠁡󠁿 Flag for South (LB-JA) 🏴󠁬󠁩󠀱󠀱󠁿 Flag for Vaduz (LI-11) 🏴󠁬󠁣󠀰󠀲󠁿 Flag for Castries (LC-02) 🏴󠁬󠁫󠀸󠁿 Flag for Uva (LK-8) 🏴󠁬󠁩󠀱󠀰󠁿 Flag for Triesenberg (LI-10) 🏴󠁬󠁩󠀰󠀵󠁿 Flag for Planken (LI-05) 🏴󠁬󠁣󠀱󠀱󠁿 Flag for Vieux Fort (LC-11) 🏴󠁬󠁢󠁢󠁨󠁿 Flag for Baalbek-Hermel (LB-BH) 🏴󠁬󠁫󠀶󠁿 Flag for North Western (LK-6) 🏴󠁬󠁩󠀰󠀶󠁿 Flag for Ruggell (LI-06) 🏴󠁬󠁣󠀰󠀸󠁿 Flag for Micoud (LC-08) 🏴󠁬󠁩󠀰󠀲󠁿 Flag for Eschen (LI-02) 🏴󠁬󠁣󠀱󠀲󠁿 Flag for Canaries (LC-12) 🏴󠁬󠁢󠁢󠁡󠁿 Flag for Beirut (LB-BA) 🏴󠁬󠁡󠁸󠁩󠁿 Flag for Xiangkhouang (LA-XI) 🏴󠁬󠁣󠀱󠀰󠁿 Flag for Soufrière (LC-10) 🏴󠁬󠁣󠀰󠀱󠁿 Flag for Anse la Raye (LC-01) 🏴󠁬󠁣󠀰󠀳󠁿 Flag for Choiseul (LC-03) 🏴󠁬󠁩󠀰󠀳󠁿 Flag for Gamprin (LI-03) 🏴󠁬󠁫󠀴󠁿 Flag for Northern (LK-4) 🏴󠁬󠁲󠁧󠁢󠁿 Flag for Grand Bassa (LR-GB) 🏴󠁬󠁲󠁧󠁰󠁿 Flag for Gbarpolu (LR-GP) 🏴󠁬󠁲󠁧󠁧󠁿 Flag for Grand Gedeh (LR-GG) 🏴󠁬󠁴󠀱󠀲󠁿 Flag for Jurbarkas (LT-12) 🏴󠁬󠁲󠁮󠁩󠁿 Flag for Nimba (LR-NI) 🏴󠁦󠁩󠀰󠀸󠁿 Flag for Central Finland (FI-08) 🏴󠁬󠁴󠀱󠀰󠁿 Flag for Jonava (LT-10) 🏴󠁬󠁲󠁭󠁧󠁿 Flag for Margibi (LR-MG) 🏴󠁬󠁲󠁳󠁩󠁿 Flag for Sinoe (LR-SI) 🏴󠁬󠁲󠁭󠁯󠁿 Flag for 
Montserrado (LR-MO) 🏴󠁬󠁴󠀱󠀶󠁿 Flag for Kaunas (LT-16) 🏴󠁬󠁳󠁫󠁿 Flag for Thaba-Tseka (LS-K) 🏴󠁬󠁴󠀰󠀵󠁿 Flag for Birštonas (LT-05) 🏴󠁬󠁳󠁦󠁿 Flag for Mohale’s Hoek (LS-F) 🏴󠁬󠁲󠁢󠁭󠁿 Flag for Bomi (LR-BM) 🏴󠁬󠁴󠀰󠀷󠁿 Flag for Druskininkai (LT-07) 🏴󠁬󠁴󠀱󠀴󠁿 Flag for Kalvarija (LT-14) 🏴󠁬󠁴󠀱󠀵󠁿 Flag for Kauno Municipality (LT-15) 🏴󠁬󠁳󠁨󠁿 Flag for Qacha’s Nek (LS-H) 🏴󠁬󠁴󠀰󠀴󠁿 Flag for Anykščiai (LT-04) 🏴󠁬󠁳󠁣󠁿 Flag for Leribe (LS-C) 🏴󠁬󠁴󠀱󠀱󠁿 Flag for Joniškis (LT-11) 🏴󠁬󠁲󠁬󠁯󠁿 Flag for Lofa (LR-LO) 🏴󠁬󠁲󠁲󠁩󠁿 Flag for Rivercess (LR-RI) 🏴󠁬󠁴󠀱󠀳󠁿 Flag for Kaišiadorys (LT-13) 🏴󠁬󠁴󠀰󠀸󠁿 Flag for Elektrėnai (LT-08) 🏴󠁬󠁲󠁧󠁫󠁿 Flag for Grand Kru (LR-GK) 🏴󠁬󠁳󠁤󠁿 Flag for Berea (LS-D) 🏴󠁬󠁳󠁧󠁿 Flag for Quthing (LS-G) 🏴󠁬󠁳󠁢󠁿 Flag for Butha-Buthe (LS-B) 🏴󠁬󠁴󠀰󠀱󠁿 Flag for Akmenė (LT-01) 🏴󠁬󠁴󠀰󠀹󠁿 Flag for Ignalina (LT-09) 🏴󠁬󠁳󠁥󠁿 Flag for Mafeteng (LS-E) 🏴󠁬󠁳󠁪󠁿 Flag for Mokhotlong (LS-J) 🏴󠁬󠁴󠀰󠀳󠁿 Flag for Alytus (LT-03) 🏴󠁬󠁴󠀰󠀶󠁿 Flag for Biržai (LT-06) 🏴󠁣󠁦󠁫󠁢󠁿 Flag for Nana-Grébizi (CF-KB) 🏴󠁬󠁲󠁲󠁧󠁿 Flag for River Gee (LR-RG) 🏴󠁬󠁴󠀵󠀴󠁿 Flag for Utena (LT-54) 🏴󠁬󠁴󠀲󠀷󠁿 Flag for Molėtai (LT-27) 🏴󠁬󠁴󠀴󠀱󠁿 Flag for Šakiai (LT-41) 🏴󠁬󠁴󠀱󠀹󠁿 Flag for Kelmė (LT-19) 🏴󠁬󠁴󠀲󠀳󠁿 Flag for Kupiškis (LT-23) 🏴󠁬󠁴󠀵󠀶󠁿 Flag for Vilkaviškis (LT-56) 🏴󠁬󠁴󠀲󠀸󠁿 Flag for Neringa (LT-28) 🏴󠁬󠁴󠀳󠀳󠁿 Flag for Panevėžys (LT-33) 🏴󠁬󠁴󠀲󠀹󠁿 Flag for Pagėgiai (LT-29) 🏴󠁬󠁴󠀴󠀳󠁿 Flag for Šiaulių Municipality (LT-43) 🏴󠁬󠁴󠀳󠀱󠁿 Flag for Palanga (LT-31) 🏴󠁬󠁴󠀱󠀸󠁿 Flag for Kėdainiai (LT-18) 🏴󠁬󠁴󠀴󠀰󠁿 Flag for Rokiškis (LT-40) 🏴󠁬󠁴󠀴󠀵󠁿 Flag for Šilalė (LT-45) 🏴󠁬󠁴󠀵󠀲󠁿 Flag for Trakai (LT-52) 🏴󠁦󠁭󠁰󠁮󠁩󠁿 Flag for Pohnpei (FM-PNI) 🏴󠁬󠁴󠀳󠀶󠁿 Flag for Prienai (LT-36) 🏴󠁬󠁴󠀵󠀱󠁿 Flag for Telšiai (LT-51) 🏴󠁬󠁴󠀲󠀱󠁿 Flag for Klaipėda (LT-21) 🏴󠁬󠁴󠀱󠀷󠁿 Flag for Kazlų Rūda (LT-17) 🏴󠁬󠁴󠀴󠀷󠁿 Flag for Širvintos (LT-47) 🏴󠁬󠁴󠀳󠀰󠁿 Flag for Pakruojis (LT-30) 🏴󠁬󠁴󠀴󠀴󠁿 Flag for Šiauliai (LT-44) 🏴󠁬󠁴󠀲󠀲󠁿 Flag for Kretinga (LT-22) 🏴󠁬󠁴󠀴󠀶󠁿 Flag for Šilutė (LT-46) 🏴󠁬󠁴󠀴󠀲󠁿 Flag for Šalčininkai (LT-42) 🏴󠁬󠁴󠀳󠀸󠁿 Flag for Raseiniai (LT-38) 🏴󠁬󠁴󠀵󠀵󠁿 Flag for Varėna (LT-55) 🏴󠁬󠁴󠀳󠀴󠁿 Flag for Pasvalys (LT-34) 🏴󠁬󠁴󠀳󠀵󠁿 Flag for Plungė (LT-35) 🏴󠁬󠁴󠀴󠀹󠁿 Flag for Švenčionys (LT-49) 🏴󠁬󠁴󠀳󠀷󠁿 Flag for 
Radviliškis (LT-37) 🏴󠁬󠁴󠀲󠀴󠁿 Flag for Lazdijai (LT-24) 🏴󠁬󠁴󠀵󠀰󠁿 Flag for Tauragė (LT-50) 🏴󠁬󠁴󠀴󠀸󠁿 Flag for Skuodas (LT-48) 🏴󠁬󠁴󠀵󠀳󠁿 Flag for Ukmergė (LT-53) 🏴󠁬󠁴󠀳󠀹󠁿 Flag for Rietavas (LT-39) 🏴󠁬󠁴󠀲󠀵󠁿 Flag for Marijampolė (LT-25) 🏴󠁬󠁴󠀲󠀶󠁿 Flag for Mažeikiai (LT-26) 🏴󠁬󠁶󠀰󠀱󠀳󠁿 Flag for Baldone (LV-013) 🏴󠁬󠁴󠁶󠁬󠁿 Flag for Vilnius County (LT-VL) 🏴󠁬󠁶󠀰󠀰󠀶󠁿 Flag for Alsunga (LV-006) 🏴󠁬󠁴󠀵󠀸󠁿 Flag for Vilnius (LT-58) 🏴󠁬󠁴󠁴󠁡󠁿 Flag for Tauragė County (LT-TA) 🏴󠁬󠁴󠁵󠁴󠁿 Flag for Utena County (LT-UT) 🏴󠁬󠁶󠀰󠀰󠀲󠁿 Flag for Aizkraukle (LV-002) 🏴󠁬󠁵󠁤󠁩󠁿 Flag for Diekirch (LU-DI) 🏴󠁬󠁴󠁭󠁲󠁿 Flag for Marijampolė County (LT-MR) 👩🏽‍👨🏽‍👶🏽 Family - Woman: Medium Skin Tone, Man: Medium Skin Tone, Baby: Medium Skin Tone 🏴󠁬󠁴󠁳󠁡󠁿 Flag for Šiauliai County (LT-SA) 🏴󠁬󠁵󠁥󠁣󠁿 Flag for Echternach (LU-EC) 🏴󠁬󠁵󠁲󠁤󠁿 Flag for Redange (LU-RD) 🏴󠁬󠁵󠁣󠁬󠁿 Flag for Clervaux (LU-CL) 🏴󠁬󠁴󠀵󠀹󠁿 Flag for Visaginas (LT-59) 🏴󠁬󠁶󠀰󠀰󠀹󠁿 Flag for Ape (LV-009) 🏴󠁬󠁶󠀰󠀰󠀸󠁿 Flag for Amata (LV-008) 🏴󠁬󠁴󠁡󠁬󠁿 Flag for Alytus County (LT-AL) 🏴󠁬󠁵󠁧󠁲󠁿 Flag for Grevenmacher (LU-GR) 🏴󠁬󠁶󠀰󠀰󠀱󠁿 Flag for Aglona (LV-001) 🏴󠁬󠁵󠁭󠁥󠁿 Flag for Mersch (LU-ME) 🏴󠁬󠁵󠁶󠁤󠁿 Flag for Vianden (LU-VD) 🏴󠁬󠁶󠀰󠀰󠀵󠁿 Flag for Aloja (LV-005) 🏴󠁬󠁢󠁪󠁬󠁿 Flag for Mount Lebanon (LB-JL) 🏴󠁬󠁴󠁫󠁵󠁿 Flag for Kaunas County (LT-KU) 🏴󠁬󠁴󠀶󠀰󠁿 Flag for Zarasai (LT-60) 🏴󠁬󠁵󠁷󠁩󠁿 Flag for Wiltz (LU-WI) 🏴󠁬󠁶󠀰󠀱󠀱󠁿 Flag for Ādaži (LV-011) 🏴󠁬󠁵󠁬󠁵󠁿 Flag for Luxembourg (LU-LU) 🏴󠁬󠁴󠁴󠁥󠁿 Flag for Telšiai County (LT-TE) 🏴󠁬󠁶󠀰󠀰󠀷󠁿 Flag for Alūksne (LV-007) 🏴󠁬󠁵󠁲󠁭󠁿 Flag for Remich (LU-RM) 🏴󠁬󠁶󠀰󠀰󠀴󠁿 Flag for Aknīste (LV-004) 🏴󠁬󠁵󠁥󠁳󠁿 Flag for Esch-sur-Alzette (LU-ES) 🏴󠁬󠁶󠀰󠀰󠀳󠁿 Flag for Aizpute (LV-003) 🏴󠁬󠁴󠁫󠁬󠁿 Flag for Klaipėda County (LT-KL) 🏴󠁬󠁶󠀰󠀲󠀷󠁿 Flag for Dundaga (LV-027) 🏴󠁬󠁶󠀰󠀴󠀰󠁿 Flag for Jaunpils (LV-040) 🏴󠁬󠁶󠀰󠀱󠀹󠁿 Flag for Burtnieki (LV-019) 🏴󠁬󠁶󠀰󠀱󠀵󠁿 Flag for Balvi (LV-015) 🏴󠁬󠁶󠀰󠀱󠀷󠁿 Flag for Beverīna (LV-017) 🏴󠁬󠁶󠀰󠀲󠀵󠁿 Flag for Daugavpils Municipality (LV-025) 🏴󠁬󠁶󠀰󠀲󠀱󠁿 Flag for Cesvaine (LV-021) 🏴󠁬󠁶󠀰󠀳󠀶󠁿 Flag for Ilūkste (LV-036) 🏴󠁬󠁶󠀰󠀵󠀰󠁿 Flag for Kuldīga (LV-050) 🏴󠁬󠁶󠀰󠀳󠀲󠁿 Flag for Grobiņa (LV-032) 🏴󠁬󠁶󠀰󠀳󠀳󠁿 Flag for Gulbene (LV-033) 🏴󠁬󠁶󠀰󠀴󠀳󠁿 Flag for Kandava (LV-043) 
🏴󠁬󠁶󠀰󠀱󠀸󠁿 Flag for Brocēni (LV-018) 🏴󠁬󠁶󠀰󠀴󠀸󠁿 Flag for Krimulda (LV-048) 🏴󠁬󠁶󠀰󠀲󠀰󠁿 Flag for Carnikava (LV-020) 🏴󠁬󠁶󠀰󠀴󠀹󠁿 Flag for Krustpils (LV-049) 👩🏾‍👨🏾‍👶🏾 Family - Woman: Medium-Dark Skin Tone, Man: Medium-Dark Skin Tone, Baby: Medium-Dark Skin Tone 🏴󠁬󠁶󠀰󠀲󠀶󠁿 Flag for Dobele (LV-026) 🏴󠁬󠁶󠀰󠀴󠀵󠁿 Flag for Kocēni (LV-045) 🏴󠁬󠁶󠀰󠀳󠀱󠁿 Flag for Garkalne (LV-031) 🏴󠁬󠁶󠀰󠀳󠀰󠁿 Flag for Ērgļi (LV-030) 🏴󠁬󠁶󠀰󠀲󠀸󠁿 Flag for Durbe (LV-028) 🏴󠁬󠁶󠀰󠀴󠀷󠁿 Flag for Krāslava (LV-047) 🏴󠁬󠁶󠀰󠀲󠀴󠁿 Flag for Dagda (LV-024) 🏴󠁬󠁶󠀰󠀳󠀸󠁿 Flag for Jaunjelgava (LV-038) 🏴󠁬󠁶󠀰󠀱󠀶󠁿 Flag for Bauska (LV-016) 🏴󠁬󠁶󠀰󠀱󠀴󠁿 Flag for Baltinava (LV-014) 🏴󠁬󠁶󠀰󠀴󠀲󠁿 Flag for Jēkabpils Municipality (LV-042) 🏴󠁬󠁶󠀰󠀳󠀹󠁿 Flag for Jaunpiebalga (LV-039) 🏴󠁬󠁶󠀰󠀲󠀲󠁿 Flag for Cēsis (LV-022) 🏴󠁬󠁶󠀰󠀳󠀴󠁿 Flag for Iecava (LV-034) 🏴󠁬󠁶󠀰󠀵󠀱󠁿 Flag for Ķegums (LV-051) 🏴󠁬󠁶󠀰󠀳󠀵󠁿 Flag for Ikšķile (LV-035) 🏴󠁬󠁶󠀰󠀲󠀳󠁿 Flag for Cibla (LV-023) 🏴󠁬󠁶󠀰󠀴󠀴󠁿 Flag for Kārsava (LV-044) 🏴󠁬󠁶󠀰󠀲󠀹󠁿 Flag for Engure (LV-029) 🏴󠁬󠁶󠀰󠀵󠀵󠁿 Flag for Līgatne (LV-055) 🏴󠁬󠁶󠀰󠀶󠀶󠁿 Flag for Nīca (LV-066) 🏴󠁬󠁶󠀰󠀶󠀱󠁿 Flag for Mālpils (LV-061) 🏴󠁧󠁥󠁫󠁫󠁿 Flag for Kvemo Kartli (GE-KK) 🏴󠁬󠁶󠀰󠀷󠀰󠁿 Flag for Pārgauja (LV-070) 🏴󠁬󠁶󠀰󠀵󠀳󠁿 Flag for Lielvārde (LV-053) 🏴󠁬󠁶󠀰󠀷󠀲󠁿 Flag for Pļaviņas (LV-072) 🏴󠁬󠁶󠀰󠀷󠀱󠁿 Flag for Pāvilosta (LV-071) 🏴󠁬󠁶󠀰󠀵󠀹󠁿 Flag for Madona (LV-059) 🏴󠁬󠁶󠀰󠀷󠀶󠁿 Flag for Rauna (LV-076) 🏴󠁬󠁶󠀰󠀵󠀴󠁿 Flag for Limbaži (LV-054) 🏴󠁬󠁶󠀰󠀶󠀴󠁿 Flag for Naukšēni (LV-064) 🏴󠁬󠁶󠀰󠀵󠀲󠁿 Flag for Ķekava (LV-052) 🏴󠁬󠁶󠀰󠀸󠀷󠁿 Flag for Salaspils (LV-087) 🏴󠁬󠁶󠀰󠀶󠀳󠁿 Flag for Mērsrags (LV-063) 🏴󠁬󠁶󠀰󠀶󠀸󠁿 Flag for Olaine (LV-068) 🏴󠁬󠁶󠀰󠀷󠀹󠁿 Flag for Roja (LV-079) 🏴󠁬󠁶󠀰󠀸󠀱󠁿 Flag for Rucava (LV-081) 🏴󠁬󠁶󠀰󠀸󠀲󠁿 Flag for Rugāji (LV-082) 🏴󠁬󠁶󠀰󠀶󠀷󠁿 Flag for Ogre (LV-067) 🏴󠁬󠁶󠀰󠀸󠀴󠁿 Flag for Rūjiena (LV-084) 🏴󠁬󠁶󠀰󠀸󠀹󠁿 Flag for Saulkrasti (LV-089) 🏴󠁬󠁶󠀰󠀸󠀸󠁿 Flag for Saldus (LV-088) 🏴󠁬󠁶󠀰󠀸󠀳󠁿 Flag for Rundāle (LV-083) 🏴󠁬󠁶󠀰󠀶󠀵󠁿 Flag for Nereta (LV-065) 🏴󠁬󠁶󠀰󠀶󠀹󠁿 Flag for Ozolnieki (LV-069) 🏴󠁬󠁶󠀰󠀸󠀰󠁿 Flag for Ropaži (LV-080) 🏴󠁬󠁶󠀰󠀷󠀸󠁿 Flag for Riebiņi (LV-078) 🏴󠁬󠁶󠀰󠀵󠀶󠁿 Flag for Līvāni (LV-056) 🏴󠁬󠁶󠀰󠀷󠀵󠁿 Flag for Priekuļi (LV-075) 🏴󠁬󠁶󠀰󠀵󠀸󠁿 Flag for Ludza (LV-058) 🏴󠁬󠁶󠀰󠀹󠀰󠁿 Flag for Sēja (LV-090) 🏴󠁬󠁶󠀰󠀷󠀴󠁿 Flag 
for Priekule (LV-074) 🏴󠁬󠁶󠀰󠀵󠀷󠁿 Flag for Lubāna (LV-057) 🏴󠁬󠁶󠀰󠀸󠀶󠁿 Flag for Salacgrīva (LV-086) 🏴󠁬󠁶󠀰󠀶󠀲󠁿 Flag for Mārupe (LV-062) 🏴󠁬󠁶󠀰󠀷󠀳󠁿 Flag for Preiļi (LV-073) 🏴󠁬󠁶󠀱󠀰󠀷󠁿 Flag for Viesīte (LV-107) 🏴󠁬󠁶󠀰󠀹󠀴󠁿 Flag for Smiltene (LV-094) 🏴󠁬󠁹󠁫󠁦󠁿 Flag for Kufra (LY-KF) 🏴󠁬󠁶󠁤󠁧󠁶󠁿 Flag for Daugavpils (LV-DGV) 🏴󠁬󠁶󠀰󠀹󠀹󠁿 Flag for Tukums (LV-099) 👩🏿‍👨🏿‍👶🏿 Family - Woman: Dark Skin Tone, Man: Dark Skin Tone, Baby: Dark Skin Tone 🏴󠁬󠁶󠁬󠁰󠁸󠁿 Flag for Liepāja (LV-LPX) 🏴󠁬󠁶󠀱󠀰󠀱󠁿 Flag for Valka (LV-101) 🏴󠁬󠁶󠀱󠀰󠀳󠁿 Flag for Vārkava (LV-103) 🏴󠁬󠁹󠁭󠁢󠁿 Flag for Murqub (LY-MB) 🏴󠁬󠁶󠁶󠁥󠁮󠁿 Flag for Ventspils (LV-VEN) 🏴󠁬󠁹󠁪󠁡󠁿 Flag for Jabal al Akhdar (LY-JA) 🏴󠁬󠁶󠁪󠁫󠁢󠁿 Flag for Jēkabpils (LV-JKB) 🏴󠁬󠁶󠀰󠀹󠀱󠁿 Flag for Sigulda (LV-091) 🏴󠁬󠁹󠁪󠁧󠁿 Flag for Jabal al Gharbi (LY-JG) 🏴󠁬󠁹󠁧󠁴󠁿 Flag for Ghat (LY-GT) 🏴󠁬󠁶󠀰󠀹󠀵󠁿 Flag for Stopiņi (LV-095) 🏴󠁬󠁶󠁲󠁩󠁸󠁿 Flag for Riga (LV-RIX) 🏴󠁬󠁹󠁤󠁲󠁿 Flag for Derna (LY-DR) 🏴󠁬󠁶󠀱󠀰󠀰󠁿 Flag for Vaiņode (LV-100) 🏴󠁬󠁶󠀱󠀰󠀲󠁿 Flag for Varakļāni (LV-102) 🏴󠁬󠁶󠁪󠁥󠁬󠁿 Flag for Jelgava (LV-JEL) 🏴󠁬󠁶󠀰󠀹󠀲󠁿 Flag for Skrīveri (LV-092) 🏴󠁬󠁶󠀰󠀹󠀷󠁿 Flag for Talsi (LV-097) 🏴󠁬󠁶󠁶󠁭󠁲󠁿 Flag for Valmiera (LV-VMR) 🏴󠁬󠁹󠁢󠁡󠁿 Flag for Benghazi (LY-BA) 🏴󠁬󠁶󠁲󠁥󠁺󠁿 Flag for Rēzekne (LV-REZ) 🏴󠁬󠁶󠀰󠀹󠀳󠁿 Flag for Skrunda (LV-093) 🏴󠁬󠁶󠀱󠀱󠀰󠁿 Flag for Zilupe (LV-110) 🏴󠁬󠁶󠀰󠀹󠀶󠁿 Flag for Strenči (LV-096) 🏴󠁬󠁹󠁪󠁵󠁿 Flag for Jufra (LY-JU) 🏴󠁬󠁶󠀱󠀰󠀴󠁿 Flag for Vecpiebalga (LV-104) 🏴󠁬󠁶󠀱󠀰󠀵󠁿 Flag for Vecumnieki (LV-105) 🏴󠁬󠁶󠀱󠀰󠀸󠁿 Flag for Viļaka (LV-108) 🏴󠁬󠁶󠁪󠁵󠁲󠁿 Flag for Jūrmala (LV-JUR) 🏴󠁬󠁶󠀱󠀰󠀹󠁿 Flag for Viļāni (LV-109) 🏴󠁬󠁶󠀰󠀹󠀸󠁿 Flag for Tērvete (LV-098) 🏴󠁭󠁡󠀰󠀸󠁿 Flag for Grand Casablanca (MA-08) 🏴󠁬󠁹󠁭󠁪󠁿 Flag for Marj (LY-MJ) 🏴󠁬󠁹󠁷󠁡󠁿 Flag for Al Wahat (LY-WA) 🏴󠁭󠁣󠁭󠁣󠁿 Flag for Monte Carlo (MC-MC) 🏴󠁭󠁡󠀱󠀴󠁿 Flag for Guelmim-Es Semara (MA-14) 🏴󠁬󠁹󠁺󠁡󠁿 Flag for Zawiya (LY-ZA) 🏴󠁭󠁡󠀰󠀲󠁿 Flag for Gharb-Chrarda-Béni Hssen (MA-02) 🏴󠁭󠁡󠀱󠀱󠁿 Flag for Marrakesh-Tensift-El Haouz (MA-11) 🏴󠁭󠁡󠀱󠀰󠁿 Flag for Doukkala-Abda (MA-10) 👩🏽‍👩🏽‍👦🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Boy: Medium Skin Tone 🏴󠁭󠁡󠀰󠀷󠁿 Flag for Rabat-Salé-Zemmour-Zaer (MA-07) 🏴󠁭󠁡󠀱󠀶󠁿 Flag for Oued Ed-Dahab-Lagouira (MA-16) 
🏴󠁬󠁹󠁮󠁬󠁿 Flag for Nalut (LY-NL) 🏴󠁬󠁹󠁳󠁢󠁿 Flag for Sabha (LY-SB) 🏴󠁭󠁡󠀰󠀳󠁿 Flag for Taza-Al Hoceima-Taounate (MA-03) 🏴󠁭󠁣󠁪󠁥󠁿 Flag for Jardin Exotique de Monaco (MC-JE) 🏴󠁬󠁹󠁷󠁳󠁿 Flag for Wadi al Shatii (LY-WS) 🏴󠁭󠁣󠁬󠁡󠁿 Flag for Larvotto (MC-LA) 🏴󠁬󠁹󠁮󠁱󠁿 Flag for Nuqat al Khams (LY-NQ) 🏴󠁭󠁣󠁭󠁡󠁿 Flag for Malbousquet (MC-MA) 🏴󠁭󠁡󠀱󠀲󠁿 Flag for Tadla-Azilal (MA-12) 🏴󠁭󠁣󠁣󠁯󠁿 Flag for La Condamine (MC-CO) 🏴󠁭󠁣󠁭󠁯󠁿 Flag for Monaco-Ville (MC-MO) 🏴󠁭󠁡󠀰󠀹󠁿 Flag for Chaouia-Ouardigha (MA-09) 🏴󠁭󠁡󠀰󠀱󠁿 Flag for Tangier-Tétouan (MA-01) 🏴󠁭󠁣󠁭󠁧󠁿 Flag for Moneghetti (MC-MG) 🏴󠁬󠁹󠁭󠁱󠁿 Flag for Murzuq (LY-MQ) 🏴󠁭󠁡󠀰󠀶󠁿 Flag for Meknès-Tafilalet (MA-06) 🏴󠁭󠁣󠁦󠁯󠁿 Flag for Fontvieille (MC-FO) 🏴󠁬󠁹󠁷󠁤󠁿 Flag for Wadi al Hayaa (LY-WD) 🏴󠁭󠁣󠁣󠁬󠁿 Flag for La Colle (MC-CL) 🏴󠁬󠁹󠁳󠁲󠁿 Flag for Sirte (LY-SR) 🏴󠁬󠁹󠁭󠁩󠁿 Flag for Misrata (LY-MI) 🏴󠁭󠁡󠀰󠀵󠁿 Flag for Fès-Boulemane (MA-05) 🏴󠁬󠁹󠁴󠁢󠁿 Flag for Tripoli (LY-TB) 🏴󠁭󠁣󠁧󠁡󠁿 Flag for La Gare (MC-GA) 👩🏾‍👩🏾‍👦🏾 Family - Woman: Medium-Dark Skin Tone, Woman: Medium-Dark Skin Tone, Boy: Medium-Dark Skin Tone 🏴󠁭󠁤󠁥󠁤󠁿 Flag for Edineț (MD-ED) 🏴󠁭󠁤󠁨󠁩󠁿 Flag for Hîncești (MD-HI) 🏴󠁭󠁤󠁦󠁡󠁿 Flag for Fălești (MD-FA) 🏴󠁭󠁤󠁣󠁲󠁿 Flag for Criuleni (MD-CR) 🏴󠁭󠁤󠁳󠁩󠁿 Flag for Sîngerei (MD-SI) 🏴󠁭󠁤󠁳󠁯󠁿 Flag for Soroca (MD-SO) 🏴󠁭󠁤󠁣󠁴󠁿 Flag for Cantemir (MD-CT) 🏴󠁭󠁤󠁲󠁥󠁿 Flag for Rezina (MD-RE) 🏴󠁭󠁤󠁳󠁤󠁿 Flag for Șoldănești (MD-SD) 🏴󠁭󠁤󠁢󠁲󠁿 Flag for Briceni (MD-BR) 🏴󠁭󠁣󠁶󠁲󠁿 Flag for Vallon de la Rousse (MC-VR) 🏴󠁭󠁤󠁢󠁡󠁿 Flag for Bălţi (MD-BA) 🏴󠁭󠁤󠁤󠁵󠁿 Flag for Dubăsari (MD-DU) 🏴󠁭󠁤󠁣󠁬󠁿 Flag for Călărași (MD-CL) 🏴󠁭󠁣󠁳󠁰󠁿 Flag for Spélugues (MC-SP) 🏴󠁭󠁤󠁣󠁡󠁿 Flag for Cahul (MD-CA) 🏴󠁭󠁤󠁩󠁡󠁿 Flag for Ialoveni (MD-IA) 🏴󠁭󠁤󠁯󠁲󠁿 Flag for Orhei (MD-OR) 🏴󠁭󠁤󠁤󠁲󠁿 Flag for Drochia (MD-DR) 🏴󠁭󠁤󠁧󠁡󠁿 Flag for Gagauzia (MD-GA) 🏴󠁭󠁤󠁣󠁭󠁿 Flag for Cimișlia (MD-CM) 🏴󠁭󠁤󠁯󠁣󠁿 Flag for Ocniţa (MD-OC) 🏴󠁭󠁤󠁢󠁳󠁿 Flag for Basarabeasca (MD-BS) 🏴󠁭󠁤󠁳󠁴󠁿 Flag for Strășeni (MD-ST) 🏴󠁭󠁤󠁡󠁮󠁿 Flag for Anenii Noi (MD-AN) 🏴󠁭󠁣󠁭󠁵󠁿 Flag for Moulins (MC-MU) 🏴󠁭󠁤󠁢󠁤󠁿 Flag for Bender (MD-BD) 🏴󠁭󠁤󠁧󠁬󠁿 Flag for Glodeni (MD-GL) 🏴󠁭󠁣󠁳󠁯󠁿 Flag for La Source (MC-SO) 🏴󠁭󠁤󠁣󠁵󠁿 Flag for Chișinău (MD-CU) 🏴󠁭󠁤󠁤󠁯󠁿 Flag for Dondușeni 
(MD-DO) 🏴󠁭󠁤󠁦󠁬󠁿 Flag for Florești (MD-FL) 🏴󠁭󠁣󠁰󠁨󠁿 Flag for Port Hercules (MC-PH) 🏴󠁭󠁤󠁮󠁩󠁿 Flag for Nisporeni (MD-NI) 🏴󠁭󠁤󠁲󠁩󠁿 Flag for Rîșcani (MD-RI) 🏴󠁭󠁤󠁬󠁥󠁿 Flag for Leova (MD-LE) 🏴󠁭󠁤󠁳󠁶󠁿 Flag for Ştefan Vodă (MD-SV) 🏴󠁭󠁤󠁵󠁮󠁿 Flag for Ungheni (MD-UN) 🏴󠁭󠁧󠁡󠁿 Flag for Toamasina (MG-A) 🏴󠁭󠁧󠁴󠁿 Flag for Antananarivo (MG-T) 🏴󠁭󠁥󠀰󠀶󠁿 Flag for Cetinje (ME-06) 🏴󠁭󠁫󠀰󠀵󠁿 Flag for Bogdanci (MK-05) 🏴󠁭󠁥󠀲󠀰󠁿 Flag for Ulcinj (ME-20) 🏴󠁭󠁥󠀰󠀹󠁿 Flag for Kolašin (ME-09) 🏴󠁭󠁫󠀰󠀷󠁿 Flag for Bosilovo (MK-07) 🏴󠁭󠁥󠀱󠀴󠁿 Flag for Pljevlja (ME-14) 🏴󠁭󠁤󠁴󠁥󠁿 Flag for Telenești (MD-TE) 🏴󠁭󠁫󠀰󠀶󠁿 Flag for Bogovinje (MK-06) 🏴󠁭󠁥󠀲󠀱󠁿 Flag for Žabljak (ME-21) 🏴󠁭󠁥󠀰󠀸󠁿 Flag for Herceg Novi (ME-08) 🏴󠁭󠁥󠀲󠀳󠁿 Flag for Petnjica (ME-23) 🏴󠁭󠁥󠀱󠀷󠁿 Flag for Rožaje (ME-17) 🏴󠁭󠁥󠀰󠀵󠁿 Flag for Budva (ME-05) 🏴󠁭󠁥󠀰󠀲󠁿 Flag for Bar (ME-02) 🏴󠁭󠁫󠀰󠀳󠁿 Flag for Berovo (MK-03) 🏴󠁭󠁥󠀱󠀹󠁿 Flag for Tivat (ME-19) 🏴󠁭󠁥󠀱󠀵󠁿 Flag for Plužine (ME-15) 🏴󠁭󠁥󠀱󠀰󠁿 Flag for Kotor (ME-10) 🏴󠁭󠁨󠁬󠁿 Flag for Ralik Chain (MH-L) 🏴󠁭󠁥󠀰󠀷󠁿 Flag for Danilovgrad (ME-07) 🏴󠁭󠁥󠀱󠀳󠁿 Flag for Plav (ME-13) 🏴󠁭󠁫󠀰󠀴󠁿 Flag for Bitola (MK-04) 🏴󠁭󠁥󠀰󠀴󠁿 Flag for Bijelo Polje (ME-04) 🏴󠁭󠁥󠀰󠀱󠁿 Flag for Andrijevica (ME-01) 👩🏿‍👩🏿‍👦🏿 Family - Woman: Dark Skin Tone, Woman: Dark Skin Tone, Boy: Dark Skin Tone 🏴󠁭󠁥󠀱󠀲󠁿 Flag for Nikšić (ME-12) 🏴󠁭󠁤󠁴󠁡󠁿 Flag for Taraclia (MD-TA) 🏴󠁭󠁥󠀱󠀱󠁿 Flag for Mojkovac (ME-11) 🏴󠁭󠁧󠁭󠁿 Flag for Mahajanga (MG-M) 🏴󠁭󠁥󠀲󠀲󠁿 Flag for Gusinje (ME-22) 🏴󠁭󠁧󠁦󠁿 Flag for Fianarantsoa (MG-F) 🏴󠁭󠁥󠀱󠀸󠁿 Flag for Šavnik (ME-18) 🏴󠁭󠁥󠀱󠀶󠁿 Flag for Podgorica (ME-16) 🏴󠁭󠁧󠁵󠁿 Flag for Toliara (MG-U) 🏴󠁭󠁧󠁤󠁿 Flag for Antsiranana (MG-D) 🏴󠁭󠁫󠀴󠀳󠁿 Flag for Kratovo (MK-43) 🏴󠁭󠁫󠀴󠀴󠁿 Flag for Kriva Palanka (MK-44) 🏴󠁭󠁫󠀵󠀲󠁿 Flag for Makedonski Brod (MK-52) 🏴󠁭󠁫󠀳󠀵󠁿 Flag for Jegunovce (MK-35) 🏴󠁭󠁫󠀴󠀹󠁿 Flag for Lozovo (MK-49) 🏴󠁭󠁫󠀴󠀷󠁿 Flag for Kumanovo (MK-47) 🏴󠁭󠁫󠀱󠀲󠁿 Flag for Vevčani (MK-12) 🏴󠁭󠁫󠀲󠀴󠁿 Flag for Demir Kapija (MK-24) 🏴󠁭󠁫󠀱󠀱󠁿 Flag for Vasilevo (MK-11) 🏴󠁭󠁫󠀳󠀰󠁿 Flag for Želino (MK-30) 🏴󠁭󠁫󠀳󠀶󠁿 Flag for Kavadarci (MK-36) 🏴󠁭󠁫󠀳󠀲󠁿 Flag for Zelenikovo (MK-32) 🏴󠁭󠁫󠀴󠀱󠁿 Flag for Konče (MK-41) 🏴󠁭󠁫󠀱󠀴󠁿 Flag for Vinica (MK-14) 🏴󠁭󠁫󠀱󠀰󠁿 Flag for Valandovo 
(MK-10) 🏴󠁭󠁫󠀵󠀵󠁿 Flag for Novaci (MK-55) 🏴󠁭󠁫󠀵󠀶󠁿 Flag for Novo Selo (MK-56) 🏴󠁭󠁫󠀳󠀴󠁿 Flag for Ilinden (MK-34) 🏴󠁭󠁫󠀵󠀱󠁿 Flag for Makedonska Kamenica (MK-51) 🏴󠁭󠁫󠀱󠀶󠁿 Flag for Vrapčište (MK-16) 🏴󠁭󠁫󠀰󠀸󠁿 Flag for Brvenica (MK-08) 🏴󠁭󠁫󠀲󠀰󠁿 Flag for Gradsko (MK-20) 🏴󠁭󠁫󠀵󠀰󠁿 Flag for Mavrovo and Rostuša (MK-50) 🏴󠁭󠁫󠀲󠀲󠁿 Flag for Debarca (MK-22) 🏴󠁭󠁫󠀱󠀹󠁿 Flag for Gostivar (MK-19) 🏴󠁭󠁫󠀵󠀳󠁿 Flag for Mogila (MK-53) 🏴󠁭󠁫󠀴󠀸󠁿 Flag for Lipkovo (MK-48) 🏴󠁭󠁫󠀳󠀷󠁿 Flag for Karbinci (MK-37) 🏴󠁭󠁫󠀳󠀳󠁿 Flag for Zrnovci (MK-33) 🏴󠁭󠁫󠀵󠀴󠁿 Flag for Negotino (MK-54) 🏴󠁭󠁫󠀴󠀰󠁿 Flag for Kičevo (MK-40) 🏴󠁭󠁫󠀲󠀱󠁿 Flag for Debar (MK-21) 🏴󠁭󠁫󠀱󠀳󠁿 Flag for Veles (MK-13) 🏴󠁭󠁫󠀲󠀶󠁿 Flag for Dojran (MK-26) 🏴󠁭󠁫󠀱󠀸󠁿 Flag for Gevgelija (MK-18) 🏴󠁭󠁫󠀴󠀲󠁿 Flag for Kočani (MK-42) 🏴󠁭󠁫󠀴󠀵󠁿 Flag for Krivogaštani (MK-45) 🏴󠁭󠁫󠀲󠀳󠁿 Flag for Delčevo (MK-23) 🏴󠁭󠁫󠀴󠀶󠁿 Flag for Kruševo (MK-46) 🏴󠁭󠁫󠀸󠀲󠁿 Flag for Čučer-Sandevo (MK-82) 🏴󠁭󠁫󠀶󠀲󠁿 Flag for Prilep (MK-62) 🏴󠁭󠁫󠀷󠀸󠁿 Flag for Centar Župa (MK-78) 🏴󠁭󠁭󠀰󠀴󠁿 Flag for Mandalay (MM-04) 🏴󠁭󠁬󠀴󠁿 Flag for Ségou (ML-4) 🏴󠁭󠁫󠀵󠀹󠁿 Flag for Petrovec (MK-59) 🏴󠁭󠁫󠀸󠀱󠁿 Flag for Češinovo-Obleševo (MK-81) 🏴󠁭󠁬󠀸󠁿 Flag for Kidal (ML-8) 🏴󠁭󠁭󠀰󠀲󠁿 Flag for Bago (MM-02) 🏴󠁭󠁫󠀷󠀲󠁿 Flag for Struga (MK-72) 🏴󠁭󠁫󠀷󠀵󠁿 Flag for Tearce (MK-75) 🏴󠁭󠁫󠀷󠀴󠁿 Flag for Studeničani (MK-74) 🏴󠁭󠁫󠀵󠀸󠁿 Flag for Ohrid (MK-58) 🏴󠁭󠁫󠀶󠀹󠁿 Flag for Sveti Nikole (MK-69) 🏴󠁭󠁫󠀷󠀳󠁿 Flag for Strumica (MK-73) 🏴󠁭󠁬󠀳󠁿 Flag for Sikasso (ML-3) 🏴󠁭󠁭󠀱󠀱󠁿 Flag for Kachin (MM-11) 🏴󠁭󠁫󠀶󠀶󠁿 Flag for Resen (MK-66) 🏴󠁭󠁬󠁢󠁫󠁯󠁿 Flag for Bamako (ML-BKO) 🏴󠁭󠁭󠀰󠀳󠁿 Flag for Magway (MM-03) 🏴󠁭󠁫󠀷󠀰󠁿 Flag for Sopište (MK-70) 🏴󠁭󠁫󠀷󠀱󠁿 Flag for Staro Nagoričane (MK-71) 🏴󠁭󠁭󠀰󠀷󠁿 Flag for Ayeyarwady (MM-07) 🏴󠁭󠁬󠀷󠁿 Flag for Gao (ML-7) 🏴󠁭󠁬󠀵󠁿 Flag for Mopti (ML-5) 🏴󠁭󠁫󠀸󠀳󠁿 Flag for Štip (MK-83) 🏴󠁭󠁭󠀱󠀲󠁿 Flag for Kayah (MM-12) 🏴󠁭󠁭󠀰󠀵󠁿 Flag for Tanintharyi (MM-05) 🏴󠁭󠁬󠀲󠁿 Flag for Koulikoro (ML-2) 🏴󠁭󠁫󠀶󠀳󠁿 Flag for Probištip (MK-63) 🏴󠁭󠁫󠀶󠀰󠁿 Flag for Pehčevo (MK-60) 🏴󠁭󠁭󠀰󠀱󠁿 Flag for Sagaing (MM-01) 🏴󠁭󠁫󠀸󠀰󠁿 Flag for Čaška (MK-80) 🏴󠁭󠁫󠀶󠀵󠁿 Flag for Rankovce (MK-65) 🏴󠁭󠁭󠀰󠀶󠁿 Flag for Yangon (MM-06) 🏴󠁭󠁫󠀷󠀶󠁿 Flag for Tetovo (MK-76) 🏴󠁭󠁫󠀶󠀷󠁿 Flag for Rosoman 
(MK-67) 🏴󠁭󠁲󠀰󠀳󠁿 Flag for Assaba (MR-03) 🏴󠁭󠁭󠀱󠀷󠁿 Flag for Shan (MM-17) 🏴󠁭󠁭󠀱󠀶󠁿 Flag for Rakhine (MM-16) 🏴󠁭󠁮󠀰󠀴󠀱󠁿 Flag for Khövsgöl (MN-041) 🏴󠁭󠁮󠀰󠀷󠀱󠁿 Flag for Bayan-Ölgii (MN-071) 🏴󠁭󠁮󠀰󠀶󠀹󠁿 Flag for Bayankhongor (MN-069) 🏴󠁭󠁮󠀰󠀶󠀱󠁿 Flag for Dornod (MN-061) 🏴󠁭󠁮󠀰󠀴󠀹󠁿 Flag for Selenge (MN-049) 🏴󠁭󠁮󠀱󠁿 Flag for Ulaanbaatar (MN-1) 🏴󠁭󠁮󠀰󠀳󠀷󠁿 Flag for Darkhan-Uul (MN-037) 🏴󠁭󠁮󠀰󠀴󠀷󠁿 Flag for Töv (MN-047) 🏴󠁭󠁭󠀱󠀵󠁿 Flag for Mon (MM-15) 🏴󠁭󠁲󠀰󠀶󠁿 Flag for Trarza (MR-06) 🏴󠁭󠁮󠀰󠀵󠀱󠁿 Flag for Sükhbaatar (MN-051) 🏴󠁭󠁲󠀰󠀴󠁿 Flag for Gorgol (MR-04) 🏴󠁭󠁮󠀰󠀵󠀵󠁿 Flag for Övörkhangai (MN-055) 🏴󠁭󠁭󠀱󠀴󠁿 Flag for Chin (MM-14) 🏴󠁭󠁮󠀰󠀶󠀷󠁿 Flag for Bulgan (MN-067) 🏴󠁭󠁮󠀰󠀵󠀷󠁿 Flag for Zavkhan (MN-057) 🏴󠁭󠁮󠀰󠀶󠀳󠁿 Flag for Dornogovi (MN-063) 🏴󠁭󠁮󠀰󠀵󠀳󠁿 Flag for Ömnögovi (MN-053) 🏴󠁭󠁭󠀱󠀳󠁿 Flag for Kayin (MM-13) 🏴󠁭󠁮󠀰󠀶󠀵󠁿 Flag for Govi-Altai (MN-065) 🏴󠁭󠁲󠀱󠀱󠁿 Flag for Tiris Zemmour (MR-11) 🏴󠁭󠁮󠀰󠀵󠀹󠁿 Flag for Dundgovi (MN-059) 🏴󠁭󠁮󠀰󠀷󠀳󠁿 Flag for Arkhangai (MN-073) 🏴󠁭󠁲󠀰󠀹󠁿 Flag for Tagant (MR-09) 🏴󠁭󠁮󠀰󠀴󠀳󠁿 Flag for Khovd (MN-043) 🏴󠁭󠁮󠀰󠀴󠀶󠁿 Flag for Uvs (MN-046) 🏴󠁭󠁮󠀰󠀶󠀴󠁿 Flag for Govisümber (MN-064) 🏴󠁭󠁲󠀰󠀵󠁿 Flag for Brakna (MR-05) 🏴󠁭󠁲󠀰󠀸󠁿 Flag for Dakhlet Nouadhibou (MR-08) 🏴󠁭󠁲󠀰󠀱󠁿 Flag for Hodh Ech Chargui (MR-01) 🏴󠁭󠁮󠀰󠀳󠀵󠁿 Flag for Orkhon (MN-035) 🏴󠁭󠁲󠀰󠀲󠁿 Flag for Hodh El Gharbi (MR-02) 🏴󠁭󠁭󠀱󠀸󠁿 Flag for Naypyidaw (MM-18) 🏴󠁭󠁲󠀰󠀷󠁿 Flag for Adrar (MR-07) 🏴󠁭󠁲󠀱󠀲󠁿 Flag for Inchiri (MR-12) 🏴󠁭󠁴󠀱󠀹󠁿 Flag for Iklin (MT-19) 🏴󠁭󠁴󠀱󠀴󠁿 Flag for Għarb (MT-14) 🏴󠁭󠁴󠀳󠀳󠁿 Flag for Mqabba (MT-33) 🏴󠁭󠁴󠀲󠀲󠁿 Flag for Kerċem (MT-22) 🏴󠁭󠁴󠀱󠀶󠁿 Flag for Għasri (MT-16) 🏴󠁭󠁴󠀲󠀴󠁿 Flag for Lija (MT-24) 🏴󠁭󠁴󠀰󠀵󠁿 Flag for Birżebbuġa (MT-05) 🏴󠁭󠁴󠀰󠀴󠁿 Flag for Birkirkara (MT-04) 🏴󠁭󠁴󠀳󠀱󠁿 Flag for Mġarr (MT-31) 🏴󠁭󠁴󠀰󠀲󠁿 Flag for Balzan (MT-02) 🏴󠁭󠁴󠀳󠀶󠁿 Flag for Munxar (MT-36) 🏴󠁭󠁴󠀱󠀳󠁿 Flag for Għajnsielem (MT-13) 🏴󠁭󠁴󠀳󠀸󠁿 Flag for Naxxar (MT-38) 🏴󠁭󠁴󠀰󠀹󠁿 Flag for Floriana (MT-09) 🏴󠁭󠁴󠀲󠀶󠁿 Flag for Marsa (MT-26) 🏴󠁭󠁴󠀰󠀷󠁿 Flag for Dingli (MT-07) 🏴󠁭󠁴󠀱󠀱󠁿 Flag for Gudja (MT-11) 🏴󠁭󠁴󠀲󠀳󠁿 Flag for Kirkop (MT-23) 🏴󠁭󠁴󠀲󠀷󠁿 Flag for Marsaskala (MT-27) 🏴󠁭󠁴󠀳󠀹󠁿 Flag for Paola (MT-39) 🏴󠁭󠁴󠀱󠀰󠁿 Flag for Fontana (MT-10) 🏴󠁭󠁴󠀳󠀴󠁿 Flag for Msida (MT-34) 
🏴󠁭󠁴󠀳󠀷󠁿 Flag for Nadur (MT-37) 🏴󠁭󠁴󠀳󠀲󠁿 Flag for Mosta (MT-32) 🏴󠁭󠁴󠀳󠀵󠁿 Flag for Imtarfa (MT-35) 🏴󠁭󠁴󠀰󠀶󠁿 Flag for Cospicua (MT-06) 🏴󠁭󠁴󠀰󠀳󠁿 Flag for Birgu (MT-03) 🏴󠁭󠁲󠀱󠀴󠁿 Flag for Nouakchott Nord (MR-14) 🏴󠁭󠁴󠀱󠀲󠁿 Flag for Gżira (MT-12) 🏴󠁭󠁴󠀳󠀰󠁿 Flag for Mellieħa (MT-30) 🏴󠁭󠁴󠀱󠀷󠁿 Flag for Għaxaq (MT-17) 🏴󠁭󠁴󠀱󠀸󠁿 Flag for Ħamrun (MT-18) 🏴󠁭󠁴󠀰󠀸󠁿 Flag for Fgura (MT-08) 🏴󠁭󠁴󠀰󠀱󠁿 Flag for Attard (MT-01) 🏴󠁭󠁴󠀱󠀵󠁿 Flag for Għargħur (MT-15) 🏴󠁭󠁴󠀲󠀱󠁿 Flag for Kalkara (MT-21) 🏴󠁭󠁲󠀱󠀵󠁿 Flag for Nouakchott Sud (MR-15) 🏴󠁭󠁴󠀲󠀸󠁿 Flag for Marsaxlokk (MT-28) 🏴󠁭󠁴󠀴󠀵󠁿 Flag for Victoria (MT-45) 🏴󠁭󠁴󠀴󠀲󠁿 Flag for Qala (MT-42) 🏴󠁭󠁴󠀶󠀴󠁿 Flag for Żabbar (MT-64) 🏴󠁭󠁵󠁡󠁧󠁿 Flag for Agaléga (MU-AG) 🏴󠁭󠁴󠀵󠀸󠁿 Flag for Ta’ Xbiex (MT-58) 🏴󠁭󠁴󠀴󠀱󠁿 Flag for Pietà (MT-41) 🏴󠁭󠁴󠀵󠀲󠁿 Flag for Sannat (MT-52) 🏴󠁭󠁵󠁰󠁬󠁿 Flag for Port Louis District (MU-PL) 🏴󠁭󠁴󠀶󠀱󠁿 Flag for Xagħra (MT-61) 🏴󠁭󠁵󠁢󠁬󠁿 Flag for Rivière Noire (MU-BL) 🏴󠁭󠁴󠀵󠀶󠁿 Flag for Sliema (MT-56) 🏴󠁭󠁴󠀴󠀷󠁿 Flag for Safi (MT-47) 🏴󠁭󠁵󠁦󠁬󠁿 Flag for Flacq (MU-FL) 🏴󠁭󠁴󠀴󠀰󠁿 Flag for Pembroke (MT-40) 🏴󠁭󠁴󠀵󠀷󠁿 Flag for Swieqi (MT-57) 🏴󠁭󠁵󠁣󠁵󠁿 Flag for Curepipe (MU-CU) 🏴󠁭󠁴󠀶󠀸󠁿 Flag for Żurrieq (MT-68) 🏴󠁭󠁴󠀴󠀹󠁿 Flag for San Ġwann (MT-49) 🏴󠁭󠁵󠁧󠁰󠁿 Flag for Grand Port (MU-GP) 🏴󠁭󠁵󠁣󠁣󠁿 Flag for Cargados Carajos (MU-CC) 🏴󠁭󠁴󠀴󠀴󠁿 Flag for Qrendi (MT-44) 🏴󠁭󠁴󠀶󠀰󠁿 Flag for Valletta (MT-60) 🏴󠁭󠁵󠁰󠁡󠁿 Flag for Pamplemousses (MU-PA) 🏴󠁭󠁴󠀴󠀳󠁿 Flag for Qormi (MT-43) 🏴󠁭󠁵󠁰󠁵󠁿 Flag for Port Louis (MU-PU) 🏴󠁭󠁴󠀵󠀹󠁿 Flag for Tarxien (MT-59) 🏴󠁭󠁴󠀶󠀵󠁿 Flag for Żebbuġ Gozo (MT-65) 🏴󠁭󠁴󠀵󠀰󠁿 Flag for Saint Lawrence (MT-50) 🏴󠁭󠁴󠀶󠀷󠁿 Flag for Żejtun (MT-67) 🏴󠁭󠁴󠀵󠀱󠁿 Flag for St. 
Paul’s Bay (MT-51) 🏴󠁭󠁴󠀵󠀳󠁿 Flag for Santa Luċija (MT-53) 🏴󠁭󠁴󠀶󠀶󠁿 Flag for Żebbuġ (MT-66) 🏴󠁭󠁴󠀴󠀶󠁿 Flag for Rabat (MT-46) 🏴󠁭󠁴󠀵󠀵󠁿 Flag for Siġġiewi (MT-55) 👩🏽‍👩🏽‍👧🏽 Family - Woman: Medium Skin Tone, Woman: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁭󠁴󠀵󠀴󠁿 Flag for Santa Venera (MT-54) 🏴󠁭󠁴󠀶󠀳󠁿 Flag for Xgħajra (MT-63) 🏴󠁭󠁵󠁭󠁯󠁿 Flag for Moka (MU-MO) 🏴󠁭󠁸󠁭󠁩󠁣󠁿 Flag for Michoacán (MX-MIC) 🏴󠁭󠁷󠁮󠁿 Flag for Northern (MW-N) 🏴󠁭󠁶󠁵󠁮󠁿 Flag for Upper North Province (MV-UN) 🏴󠁭󠁸󠁣󠁯󠁬󠁿 Flag for Colima (MX-COL) 🏴󠁭󠁵󠁲󠁯󠁿 Flag for Rodrigues (MU-RO) 🏴󠁭󠁸󠁧󠁵󠁡󠁿 Flag for Guanajuato (MX-GUA) 🏴󠁭󠁸󠁣󠁭󠁸󠁿 Flag for Ciudad de Mexico (MX-CMX) 🏴󠁭󠁸󠁰󠁵󠁥󠁿 Flag for Puebla (MX-PUE) 🏴󠁭󠁵󠁱󠁢󠁿 Flag for Quatre Bornes (MU-QB) 🏴󠁭󠁸󠁯󠁡󠁸󠁿 Flag for Oaxaca (MX-OAX) 🏴󠁭󠁷󠁣󠁿 Flag for Central (MW-C) 🏴󠁭󠁵󠁳󠁡󠁿 Flag for Savanne (MU-SA) 🏴󠁭󠁸󠁭󠁯󠁲󠁿 Flag for Morelos (MX-MOR) 🏴󠁭󠁸󠁨󠁩󠁤󠁿 Flag for Hidalgo (MX-HID) 🏴󠁭󠁸󠁡󠁧󠁵󠁿 Flag for Aguascalientes (MX-AGU) 🏴󠁭󠁸󠁣󠁡󠁭󠁿 Flag for Campeche (MX-CAM) 🏴󠁭󠁸󠁮󠁬󠁥󠁿 Flag for Nuevo León (MX-NLE) 🏴󠁭󠁶󠁭󠁬󠁥󠁿 Flag for Malé (MV-MLE) 🏴󠁭󠁸󠁧󠁲󠁯󠁿 Flag for Guerrero (MX-GRO) 🏴󠁭󠁵󠁶󠁰󠁿 Flag for Vacoas-Phoenix (MU-VP) 👨🏻‍👨🏻‍👦🏻‍👧🏻 Family - Man: Light Skin Tone, Man: Light Skin Tone, Boy: Light Skin Tone, Girl: Light Skin Tone 🏴󠁭󠁶󠁮󠁣󠁿 Flag for North Central Province (MV-NC) 🏴󠁭󠁸󠁭󠁥󠁸󠁿 Flag for Mexico State (MX-MEX) 🏴󠁭󠁵󠁰󠁷󠁿 Flag for Plaines Wilhems (MU-PW) 🏴󠁭󠁶󠁣󠁥󠁿 Flag for Central Province (MV-CE) 🏴󠁭󠁸󠁣󠁯󠁡󠁿 Flag for Coahuila (MX-COA) 🏴󠁭󠁶󠁳󠁵󠁿 Flag for South Province (MV-SU) 🏴󠁭󠁸󠁣󠁨󠁰󠁿 Flag for Chiapas (MX-CHP) 🏴󠁭󠁷󠁳󠁿 Flag for Southern (MW-S) 🏴󠁭󠁺󠁳󠁿 Flag for Sofala (MZ-S) 🏴󠁭󠁹󠀰󠀹󠁿 Flag for Perlis (MY-09) 🏴󠁭󠁸󠁶󠁥󠁲󠁿 Flag for Veracruz (MX-VER) 🏴󠁭󠁹󠀱󠀳󠁿 Flag for Sarawak (MY-13) 🏴󠁭󠁹󠀰󠀳󠁿 Flag for Kelantan (MY-03) 🏴󠁮󠁡󠁣󠁡󠁿 Flag for Zambezi (NA-CA) 🏴󠁭󠁺󠁢󠁿 Flag for Manica (MZ-B) 🏴󠁭󠁹󠀱󠀵󠁿 Flag for Labuan (MY-15) 🏴󠁭󠁺󠁰󠁿 Flag for Cabo Delgado (MZ-P) 🏴󠁮󠁡󠁨󠁡󠁿 Flag for Hardap (NA-HA) 🏴󠁭󠁺󠁴󠁿 Flag for Tete (MZ-T) 🏴󠁭󠁹󠀰󠀲󠁿 Flag for Kedah (MY-02) 🏴󠁭󠁹󠀰󠀶󠁿 Flag for Pahang (MY-06) 🏴󠁭󠁹󠀰󠀷󠁿 Flag for Penang (MY-07) 🏴󠁭󠁹󠀰󠀸󠁿 Flag for Perak (MY-08) 🏴󠁭󠁺󠁬󠁿 Flag for Maputo Province (MZ-L) 🏴󠁢󠁲󠁧󠁯󠁿 Flag for Goiás (BR-GO) 
🏴󠁭󠁹󠀱󠀱󠁿 Flag for Terengganu (MY-11) 🏴󠁭󠁺󠁩󠁿 Flag for Inhambane (MZ-I) 🏴󠁭󠁹󠀰󠀴󠁿 Flag for Malacca (MY-04) 🏴󠁮󠁡󠁥󠁲󠁿 Flag for Erongo (NA-ER) 🏴󠁭󠁸󠁴󠁬󠁡󠁿 Flag for Tlaxcala (MX-TLA) 🏴󠁭󠁹󠀰󠀵󠁿 Flag for Negeri Sembilan (MY-05) 🏴󠁭󠁸󠁺󠁡󠁣󠁿 Flag for Zacatecas (MX-ZAC) 🏴󠁭󠁸󠁴󠁡󠁭󠁿 Flag for Tamaulipas (MX-TAM) 🏴󠁭󠁺󠁡󠁿 Flag for Niassa (MZ-A) 🏴󠁭󠁺󠁭󠁰󠁭󠁿 Flag for Maputo (MZ-MPM) 🏴󠁭󠁺󠁮󠁿 Flag for Nampula (MZ-N) 🏴󠁭󠁹󠀱󠀶󠁿 Flag for Putrajaya (MY-16) 🏴󠁭󠁸󠁳󠁩󠁮󠁿 Flag for Sinaloa (MX-SIN) 🏴󠁭󠁸󠁹󠁵󠁣󠁿 Flag for Yucatán (MX-YUC) 🏴󠁭󠁹󠀱󠀲󠁿 Flag for Sabah (MY-12) 👩🏼‍👩🏼‍👧🏼‍👧🏼 Family - Woman: Medium-Light Skin Tone, Woman: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone, Girl: Medium-Light Skin Tone 🏴󠁭󠁺󠁱󠁿 Flag for Zambezia (MZ-Q) 🏴󠁭󠁸󠁱󠁵󠁥󠁿 Flag for Querétaro (MX-QUE) 🏴󠁭󠁺󠁧󠁿 Flag for Gaza (MZ-G) 🏴󠁮󠁡󠁯󠁤󠁿 Flag for Otjozondjupa (NA-OD) 🏴󠁮󠁥󠀴󠁿 Flag for Maradi (NE-4) 🏴󠁮󠁡󠁫󠁵󠁿 Flag for Kunene (NA-KU) 🏴󠁮󠁧󠁡󠁫󠁿 Flag for Akwa Ibom (NG-AK) 🏴󠁮󠁥󠀵󠁿 Flag for Tahoua (NE-5) 🏴󠁭󠁵󠁲󠁲󠁿 Flag for Rivière du Rempart (MU-RR) 🏴󠁮󠁧󠁩󠁭󠁿 Flag for Imo (NG-IM) 🏴󠁮󠁧󠁫󠁴󠁿 Flag for Katsina (NG-KT) 🏴󠁮󠁥󠀳󠁿 Flag for Dosso (NE-3) 🏴󠁮󠁥󠀶󠁿 Flag for Tillabéri (NE-6) 🏴󠁮󠁧󠁥󠁫󠁿 Flag for Ekiti (NG-EK) 🏴󠁮󠁡󠁯󠁨󠁿 Flag for Omaheke (NA-OH) 🏴󠁮󠁧󠁢󠁡󠁿 Flag for Bauchi (NG-BA) 🏴󠁮󠁡󠁫󠁡󠁿 Flag for Karas (NA-KA) 🏴󠁮󠁧󠁢󠁹󠁿 Flag for Bayelsa (NG-BY) 🏴󠁮󠁡󠁯󠁷󠁿 Flag for Ohangwena (NA-OW) 🏴󠁮󠁧󠁢󠁥󠁿 Flag for Benue (NG-BE) 🏴󠁮󠁧󠁥󠁮󠁿 Flag for Enugu (NG-EN) 🏴󠁮󠁡󠁯󠁮󠁿 Flag for Oshana (NA-ON) 🏴󠁮󠁧󠁫󠁤󠁿 Flag for Kaduna (NG-KD) 👨🏻‍👶🏻‍👦🏻 Family - Man: Light Skin Tone, Baby: Light Skin Tone, Boy: Light Skin Tone 🏴󠁮󠁧󠁫󠁥󠁿 Flag for Kebbi (NG-KE) 🏴󠁮󠁧󠁪󠁩󠁿 Flag for Jigawa (NG-JI) 🏴󠁮󠁥󠀸󠁿 Flag for Niamey (NE-8) 🏴󠁮󠁧󠁡󠁮󠁿 Flag for Anambra (NG-AN) 🏴󠁮󠁧󠁧󠁯󠁿 Flag for Gombe (NG-GO) 🏴󠁮󠁥󠀱󠁿 Flag for Agadez (NE-1) 🏴󠁮󠁡󠁫󠁨󠁿 Flag for Khomas (NA-KH) 🏴󠁮󠁥󠀲󠁿 Flag for Diffa (NE-2) 🏴󠁭󠁹󠀰󠀱󠁿 Flag for Johor (MY-01) 🏴󠁮󠁧󠁫󠁮󠁿 Flag for Kano (NG-KN) 🏴󠁮󠁡󠁯󠁳󠁿 Flag for Omusati (NA-OS) 🏴󠁮󠁧󠁫󠁯󠁿 Flag for Kogi (NG-KO) 🏴󠁮󠁧󠁥󠁤󠁿 Flag for Edo (NG-ED) 🏴󠁮󠁧󠁡󠁢󠁿 Flag for Abia (NG-AB) 🏴󠁮󠁡󠁯󠁴󠁿 Flag for Oshikoto (NA-OT) 🏴󠁮󠁡󠁫󠁷󠁿 Flag for Kavango West (NA-KW) 🏴󠁮󠁧󠁥󠁢󠁿 Flag for Ebonyi (NG-EB) 🏴󠁮󠁥󠀷󠁿 Flag for Zinder 
(NE-7) 🏴󠁮󠁩󠁪󠁩󠁿 Flag for Jinotega (NI-JI) 🏴󠁮󠁧󠁮󠁡󠁿 Flag for Nasarawa (NG-NA) 🏴󠁮󠁬󠁦󠁲󠁿 Flag for Friesland (NL-FR) 🏴󠁮󠁧󠁳󠁯󠁿 Flag for Sokoto (NG-SO) 🏴󠁮󠁩󠁲󠁩󠁿 Flag for Rivas (NI-RI) 🏴󠁮󠁩󠁮󠁳󠁿 Flag for Nueva Segovia (NI-NS) 🏴󠁮󠁧󠁰󠁬󠁿 Flag for Plateau (NG-PL) 🏴󠁮󠁧󠁹󠁯󠁿 Flag for Yobe (NG-YO) 🏴󠁮󠁬󠁢󠁱󠀱󠁿 Flag for Bonaire (NL-BQ1) 🏴󠁮󠁩󠁡󠁮󠁿 Flag for Atlántico Norte (NI-AN) 🏴󠁮󠁧󠁺󠁡󠁿 Flag for Zamfara (NG-ZA) 🏴󠁮󠁬󠁧󠁥󠁿 Flag for Gelderland (NL-GE) 🏴󠁮󠁧󠁯󠁹󠁿 Flag for Oyo (NG-OY) 🏴󠁮󠁩󠁭󠁤󠁿 Flag for Madriz (NI-MD) 🏴󠁮󠁩󠁣󠁩󠁿 Flag for Chinandega (NI-CI) 🏴󠁮󠁧󠁯󠁮󠁿 Flag for Ondo (NG-ON) 👨🏽‍👨🏽‍👦🏽‍👧🏽 Family - Man: Medium Skin Tone, Man: Medium Skin Tone, Boy: Medium Skin Tone, Girl: Medium Skin Tone 🏴󠁤󠁥󠁮󠁷󠁿 Flag for North Rhine-Westphalia (DE-NW) 🏴󠁮󠁧󠁬󠁡󠁿 Flag for Lagos (NG-LA) 🏴󠁮󠁩󠁭󠁮󠁿 Flag for Managua (NI-MN) 🏴󠁮󠁩󠁡󠁳󠁿 Flag for Atlántico Sur (NI-AS) 🏴󠁮󠁬󠁣󠁷󠁿 Flag for Curaçao (NL-CW) 🏴󠁮󠁩󠁢󠁯󠁿 Flag for Boaco (NI-BO) 🏴󠁮󠁧󠁲󠁩󠁿 Flag for Rivers (NG-RI) 🏴󠁮󠁩󠁧󠁲󠁿 Flag for Granada (NI-GR) 🏴󠁮󠁩󠁣󠁯󠁿 Flag for Chontales (NI-CO) 🏴󠁮󠁬󠁧󠁲󠁿 Flag for Groningen (NL-GR) 🏴󠁮󠁬󠁢󠁱󠀳󠁿 Flag for Sint Eustatius (NL-BQ3) 🏴󠁮󠁩󠁳󠁪󠁿 Flag for Río San Juan (NI-SJ) 🏴󠁮󠁧󠁯󠁳󠁿 Flag for Osun (NG-OS) 🏴󠁮󠁧󠁴󠁡󠁿 Flag for Taraba (NG-TA) 🏴󠁮󠁬󠁦󠁬󠁿 Flag for Flevoland (NL-FL) 🏴󠁮󠁩󠁭󠁴󠁿 Flag for Matagalpa (NI-MT) 🏴󠁮󠁬󠁤󠁲󠁿 Flag for Drenthe (NL-DR) 🏴󠁮󠁩󠁣󠁡󠁿 Flag for Carazo (NI-CA) 🏴󠁮󠁧󠁫󠁷󠁿 Flag for Kwara (NG-KW) 🏴󠁮󠁧󠁮󠁩󠁿 Flag for Niger (NG-NI) 🏴󠁮󠁩󠁥󠁳󠁿 Flag for Estelí (NI-ES) 🏴󠁮󠁬󠁺󠁨󠁿 Flag for South Holland (NL-ZH)
2.254053
2
src/ngc/main.py
HubTou/ngc
0
9842
#!/usr/bin/env python
"""
ngc - n-grams count
License: 3-clause BSD (see https://opensource.org/licenses/BSD-3-Clause)
Author: <NAME>
"""

import getopt
import logging
import os
import re
import string
import sys

import unicode2ascii

# Version string used by the what(1) and ident(1) commands:
ID = "@(#) $Id: ngc - n-grams count v1.0.2 (September 26, 2021) by <NAME> $"

# Default parameters. Can be superseded by command line options
parameters = {
    "Convert": {
        "Unicode to ASCII": False,
        "Upper to lower case": False,
        "Lower to upper case": False,
        "Spaces to one space": False,
    },
    "Discard": {
        "Unicode characters": False,
        "Upper case letters": False,
        "Lower case letters": False,
        "Connection symbols": False,  # ' -
        "Digits": False,
        "Punctuation": False,  # . , ; : ! ?
        "Other printable symbols": False,
        "Spaces": False,  # space tab return formfeed vtab
        "Control characters": False,
    },
    "Length": 1,
    "Fixed block": False,  # Sliding-window mode by default
    "Word boundary": False,
    "Partial": {
        "Discard": False,
        "Keep": True,
        "Justify": False,
    },
    "Show": {
        "Text": False,
        "N-grams": True,
        "Summary": False,
    },
}

# n-gram -> number of occurrences, accumulated over the whole input stream:
occurrences = {}

# Per-character-class counters, filled only when --summary is requested
# (except "All characters" and "All n-grams", which are always maintained):
summary = {
    "Upper case letters": 0,
    "Lower case letters": 0,
    "Connection symbols": 0,
    "Digits": 0,
    "Punctuation": 0,
    "Other printable symbols": 0,
    "Spaces": 0,
    "Other spaces": 0,
    "Control characters": 0,
    "Unicode letters": 0,
    "Unicode marks": 0,
    "Unicode numbers": 0,
    "Unicode punctuations": 0,
    "Unicode symbols": 0,
    "Unicode separators": 0,
    "Unicode others": 0,
    "All unicode characters": 0,
    "All characters": 0,
    "All n-grams": 0,
}


################################################################################
def initialize_debugging(program_name):
    """Set up console logging; debug messages are off until --debug/NGC_DEBUG."""
    console_log_format = program_name + ": %(levelname)s: %(message)s"
    logging.basicConfig(format=console_log_format, level=logging.DEBUG)
    logging.disable(logging.INFO)


################################################################################
def display_help():
    """Display usage and help on standard error."""
    print("usage: ngc [-b|--block] [-c|--convert ARGS] [--debug]", file=sys.stderr)
    print(" [-d|--discard ARGS] [--help|-?] [-l|--length ARG]", file=sys.stderr)
    print(" [-p|--partial ARG] [-q|--quiet] [-s|--summary] [-t|--text]", file=sys.stderr)
    print(" [--version] [-w|--word] [--] [filename ...]", file=sys.stderr)
    print(" ----------------- ----------------------------------------------------", file=sys.stderr)
    print(" -b|--block Use fixed- instead of sliding-windows blocks", file=sys.stderr)
    print(" -c|--convert ARGS Convert text input. A combination of:", file=sys.stderr)
    print(" ARG = a - Unicode characters to ASCII (remove accents)", file=sys.stderr)
    print(" ARG = l - Upper case letters to lower", file=sys.stderr)
    print(" ARG = u - Lower case letters to upper", file=sys.stderr)
    print(" ARG = s - Spaces-like characters to 1 space", file=sys.stderr)
    print(" ARGS l and u can't be used at the same time", file=sys.stderr)
    print(" -d|--discard ARGS Discard characters. A combination of:", file=sys.stderr)
    print(" ARG = U - Unicode characters", file=sys.stderr)
    print(" ARG = u - Upper case letters", file=sys.stderr)
    print(" ARG = l - Lower case letters", file=sys.stderr)
    print(" ARG = L - All letters", file=sys.stderr)
    print(" ARG = c - Connection symbols ('-)", file=sys.stderr)
    print(" ARG = d - Digits", file=sys.stderr)
    print(" ARG = p - Punctuation (.,;:!?)", file=sys.stderr)
    print(" ARG = o - Other printable symbols", file=sys.stderr)
    print(" ARG = s - Spaces (space, tab, return, formfeed, vtab)", file=sys.stderr)
    print(" ARG = n - Non printable Control characters", file=sys.stderr)
    print(" -l|--length ARG Length of the n-gram. Defaults to 1", file=sys.stderr)
    print(" -p|--partial ARG What to do with partial blocks? One among:", file=sys.stderr)
    print(" ARG = d - Discard", file=sys.stderr)
    print(" ARG = k - Keep as-is", file=sys.stderr)
    print(" ARG = j - Keep but right-justify with spaces", file=sys.stderr)
    print(" -q|--quiet Don't show occurrences and frequency by n-gram", file=sys.stderr)
    print(" -s|--summary Show a summary of what was processed", file=sys.stderr)
    print(" -t|--text Show modified text input", file=sys.stderr)
    print(" -w|--word Respect Word boundaries (delimited by spaces)", file=sys.stderr)
    print(" --debug Enable debug mode", file=sys.stderr)
    print(" --help|-? Print usage and this help message and exit", file=sys.stderr)
    print(" --version Print version and exit", file=sys.stderr)
    print(" -- Options processing terminator", file=sys.stderr)
    print(file=sys.stderr)


################################################################################
def process_environment_variables():
    """Enable debug logging when the NGC_DEBUG environment variable is set."""
    if "NGC_DEBUG" in os.environ:
        logging.disable(logging.NOTSET)


################################################################################
def process_command_line():
    """Parse the command line into the global parameters dict.

    Returns the list of remaining (non-option) arguments, i.e. filenames.
    Exits with status 1 on invalid options, 0 after --help/--version.
    """
    # option letters followed by : expect an argument
    # same for option strings followed by =
    character_options = "bc:d:l:p:qstw?"
    string_options = [
        "block",
        "convert=",
        "debug",
        "discard=",
        "help",
        "length=",
        "partial=",
        "quiet",
        "summary",
        "text",
        "version",
        "word",
    ]

    try:
        options, remaining_arguments = getopt.getopt(
            sys.argv[1:], character_options, string_options
        )
    except getopt.GetoptError as error:
        logging.critical(error)
        display_help()
        sys.exit(1)

    for option, argument in options:
        if option in ("-b", "--block"):
            parameters["Fixed block"] = True

        elif option in ("-c", "--convert"):
            # 'l' and 'u' are mutually exclusive conversions:
            if 'l' in argument and 'u' in argument:
                logging.critical("-c|--convert parameter can't contain [lu] at the same time")
                sys.exit(1)
            if 'a' in argument:
                parameters["Convert"]["Unicode to ASCII"] = True
            if 'l' in argument:
                parameters["Convert"]["Upper to lower case"] = True
            if 'u' in argument:
                parameters["Convert"]["Lower to upper case"] = True
            if 's' in argument:
                parameters["Convert"]["Spaces to one space"] = True

        elif option in ("-d", "--discard"):
            if 'U' in argument:
                parameters["Discard"]["Unicode characters"] = True
            if 'u' in argument:
                parameters["Discard"]["Upper case letters"] = True
            if 'l' in argument:
                parameters["Discard"]["Lower case letters"] = True
            if 'L' in argument:
                # 'L' is shorthand for both 'u' and 'l':
                parameters["Discard"]["Upper case letters"] = True
                parameters["Discard"]["Lower case letters"] = True
            if 'c' in argument:
                parameters["Discard"]["Connection symbols"] = True
            if 'd' in argument:
                parameters["Discard"]["Digits"] = True
            if 'p' in argument:
                parameters["Discard"]["Punctuation"] = True
            if 'o' in argument:
                parameters["Discard"]["Other printable symbols"] = True
            if 's' in argument:
                parameters["Discard"]["Spaces"] = True
            if 'n' in argument:
                parameters["Discard"]["Control characters"] = True

        elif option in ("-l", "--length"):
            # Fix: the previous test accepted 0 (">= 0") although the error
            # message below - and the algorithm - require a length >= 1:
            if argument.isdigit() and int(argument) > 0:
                parameters["Length"] = int(argument)
            else:
                logging.critical("-l|--length parameter must be a strictly positive integer")
                sys.exit(1)

        elif option in ("-p", "--partial"):
            if len(argument) > 1 or argument not in ('d', 'k', 'j'):
                logging.critical("-p|--partial parameter must be a single character among [dkj]")
                sys.exit(1)
            if argument == 'd':
                parameters["Partial"]["Discard"] = True
                parameters["Partial"]["Keep"] = False
            elif argument == 'j':
                parameters["Partial"]["Justify"] = True
                parameters["Partial"]["Keep"] = False

        elif option in ("-q", "--quiet"):
            parameters["Show"]["N-grams"] = False

        elif option in ("-s", "--summary"):
            parameters["Show"]["Summary"] = True

        elif option in ("-t", "--text"):
            parameters["Show"]["Text"] = True

        elif option in ("-w", "--word"):
            parameters["Word boundary"] = True

        elif option == "--debug":
            logging.disable(logging.NOTSET)

        elif option in ("--help", "-?"):
            display_help()
            sys.exit(0)

        elif option == "--version":
            # The string concatenation prevents what(1)/ident(1) from matching
            # this line instead of the ID constant itself:
            print(ID.replace("@(" + "#)" + " $" + "Id" + ": ", "").replace(" $", ""))
            sys.exit(0)

    logging.debug("process_command_line(): parameters:")
    logging.debug(parameters)
    logging.debug("process_command_line(): remaining_arguments:")
    logging.debug(remaining_arguments)

    return remaining_arguments


################################################################################
def handle_partial_n_gram(text):
    """Record a partial n-gram (shorter than Length) per the Partial policy.

    With -p d the fragment is dropped; with -p j it is right-padded with
    spaces up to the configured length; otherwise it is kept as-is.
    """
    if not parameters["Partial"]["Discard"]:
        if parameters["Partial"]["Justify"]:
            text = text.ljust(parameters["Length"])
        if text in occurrences:
            occurrences[text] += 1
        else:
            occurrences[text] = 1
        summary["All n-grams"] += 1


################################################################################
def frequency_analysis(text):
    """Count n-grams (and optionally per-character-class statistics) in text."""
    if parameters["Show"]["Summary"]:
        # Per-character classification, ASCII first, then Unicode categories:
        for character in text:
            if ord(character) < 128:
                if character in string.ascii_uppercase:
                    summary["Upper case letters"] += 1
                elif character in string.ascii_lowercase:
                    summary["Lower case letters"] += 1
                elif character in ("'", "-"):
                    summary["Connection symbols"] += 1
                elif character in string.digits:
                    summary["Digits"] += 1
                elif character in (".", ",", ";", ":", "!", "?"):
                    summary["Punctuation"] += 1
                elif character == " ":
                    summary["Spaces"] += 1
                elif character in string.whitespace:
                    summary["Other spaces"] += 1
                elif (ord(character) < 32 and ord(character) not in (9, 11, 12, 13)) \
                        or ord(character) == 127:
                    summary["Control characters"] += 1
                else:
                    summary["Other printable symbols"] += 1
            else:
                summary["All unicode characters"] += 1
                if unicode2ascii.is_unicode_letter(character):
                    summary["Unicode letters"] += 1
                elif unicode2ascii.is_unicode_mark(character):
                    summary["Unicode marks"] += 1
                elif unicode2ascii.is_unicode_number(character):
                    summary["Unicode numbers"] += 1
                elif unicode2ascii.is_unicode_punctuation(character):
                    summary["Unicode punctuations"] += 1
                elif unicode2ascii.is_unicode_symbol(character):
                    summary["Unicode symbols"] += 1
                elif unicode2ascii.is_unicode_separator(character):
                    summary["Unicode separators"] += 1
                else:
                    summary["Unicode others"] += 1

    if len(text) <= parameters["Length"]:
        # NOTE(review): a text of exactly Length characters also goes through
        # the partial-n-gram path (and is thus dropped by -p d) - confirm this
        # is the intended behaviour before tightening "<=" to "<".
        if text:
            handle_partial_n_gram(text)
    else:
        i = 0
        while i < len(text) + 1 - parameters["Length"]:
            sequence = text[i:i + parameters["Length"]]
            if sequence in occurrences:
                occurrences[sequence] += 1
            else:
                occurrences[sequence] = 1
            summary["All n-grams"] += 1
            # Fixed-block mode jumps by a full window, sliding mode by 1:
            if parameters["Fixed block"]:
                i += parameters["Length"]
            else:
                i += 1
        # Leftover characters at the end form a partial n-gram:
        if i < len(text):
            handle_partial_n_gram(text[i:])


################################################################################
def process_line(line):
    """Apply the configured conversions/discards to a line, then analyze it."""
    # Strip the end-of-line characters. Using an explicit "\r\n" set instead
    # of os.linesep so CRLF input files are handled correctly on Unix too
    # (os.linesep is just "\n" there, which used to leave a stray "\r"):
    line = line.rstrip("\r\n")

    # Conversions:
    if parameters["Convert"]["Unicode to ASCII"]:
        line = unicode2ascii.unicode_to_ascii_string(line)
    if parameters["Convert"]["Upper to lower case"]:
        line = line.lower()
    if parameters["Convert"]["Lower to upper case"]:
        line = line.upper()

    # Discards:
    if parameters["Discard"]["Unicode characters"]:
        line = "".join([c for c in line if ord(c) < 128])
    if parameters["Discard"]["Upper case letters"]:
        line = re.sub(r"[A-Z]+", "", line)
    if parameters["Discard"]["Lower case letters"]:
        line = re.sub(r"[a-z]+", "", line)
    if parameters["Discard"]["Connection symbols"]:
        line = re.sub(r"[-']+", "", line)
    if parameters["Discard"]["Digits"]:
        line = re.sub(r"[0-9]+", "", line)
    if parameters["Discard"]["Punctuation"]:
        line = re.sub(r"[\.,;:!\?]+", "", line)
    if parameters["Discard"]["Other printable symbols"]:
        line = re.sub(r"[\"#$&@\[\\\]_`{|}~%()\*+/<=>^]+", "", line)
    if parameters["Discard"]["Spaces"]:
        line = re.sub(r"[" + string.whitespace + r"]+", "", line)
    if parameters["Discard"]["Control characters"]:
        # Keep tab (9) and the vtab/formfeed/return range (11-13), drop the rest:
        line = "".join(
            [c for c in line
             if not (ord(c) < 9 or (ord(c) > 13 and ord(c) < 32) or ord(c) == 127)]
        )

    # Late conversions:
    if parameters["Convert"]["Spaces to one space"]:
        line = re.sub(r"[" + string.whitespace + r"]+", " ", line)

    if parameters["Show"]["Text"]:
        print(line)

    if parameters["Word boundary"]:
        # Splitting words on all kind of whitespaces:
        for word in line.split():
            if word:
                frequency_analysis(word)
                summary["All characters"] += len(word)
    else:
        frequency_analysis(line)
        summary["All characters"] += len(line)


################################################################################
def process_file(filename):
    """Process the file designated by filename, line by line."""
    with open(filename, "r") as file:
        # Iterate lazily instead of readlines() to avoid loading whole files:
        for line in file:
            process_line(line)


################################################################################
def compute_kappa_plaintext():
    """Return kappa_plaintext for the processed input stream.

    See https://en.wikipedia.org/wiki/Index_of_coincidence
    """
    total = summary["All n-grams"]
    if total < 2:
        # Fix: avoid a ZeroDivisionError on empty (or single-n-gram) input:
        return 0.0
    index = 0.0
    for occurrence in occurrences.values():
        index += occurrence * (occurrence - 1)
    return index / (total * (total - 1))


################################################################################
def compute_coincidence_index(kappa_plaintext):
    """Return the coincidence index for kappa_plaintext and the seen alphabet.

    The alphabet size is estimated from the character classes actually
    encountered. Returns 0 when the alphabet size cannot be determined.
    """
    # NOTE(review): only "Unicode separators" triggers the unknown-alphabet
    # bail-out; presumably any "All unicode characters" should - confirm.
    if summary["Unicode separators"]:
        # Unknown alphabet size
        return 0

    alphabet_size = 0
    if summary["Upper case letters"]:
        alphabet_size += len(string.ascii_uppercase)
    if summary["Lower case letters"]:
        alphabet_size += len(string.ascii_lowercase)
    if summary["Digits"]:
        alphabet_size += len(string.digits)
    if summary["Connection symbols"]:
        alphabet_size += len("'-")
    if summary["Punctuation"]:
        alphabet_size += len(".,;:?!")
    if summary["Other printable symbols"]:
        alphabet_size += len("\"#$&@[\\]_`{|}~%()*+/<=>^")
    if summary["Spaces"]:
        alphabet_size += 1
    if summary["Other spaces"]:
        alphabet_size += len(string.whitespace) - 1
    if summary["Control characters"]:
        alphabet_size += 29

    return kappa_plaintext * alphabet_size


################################################################################
def main():
    """The program's main entry point."""
    program_name = os.path.basename(sys.argv[0])
    initialize_debugging(program_name)
    process_environment_variables()
    arguments = process_command_line()

    exit_status = 0
    if arguments:
        # Reading from files whose name were given as arguments:
        for filename in arguments:
            if os.path.isfile(filename):
                process_file(filename)
            else:
                logging.error("The argument '%s' is not a filename", filename)
                exit_status = 1
    else:
        # Reading from standard input as there are no arguments:
        for line in sys.stdin:
            process_line(line)

    # Displaying occurrences and frequency by n-gram:
    if parameters["Show"]["N-grams"]:
        if parameters["Show"]["Text"]:
            print("--")
        decreasing_occurrences = dict(
            sorted(occurrences.items(), key=lambda t: t[1], reverse=True)
        )
        for key, value in decreasing_occurrences.items():
            print("'{}'\t{}\t{:.2f}%".format(key, value, (value/summary["All n-grams"])*100))

    # Displaying summary:
    if parameters["Show"]["Summary"]:
        print("==")
        for key, value in summary.items():
            print("{:23s}\t{:d}".format(key, value))
        print()
        kappa_plaintext = compute_kappa_plaintext()
        coincidence_index = compute_coincidence_index(kappa_plaintext)
        print("{:23s}\t{}".format("Kappa-plaintext", kappa_plaintext))
        print("{:23s}\t{}".format("Index of coincidence", coincidence_index))

    sys.exit(exit_status)


if __name__ == "__main__":
    main()
#!/usr/bin/env python """ ngc - n-grams count License: 3-clause BSD (see https://opensource.org/licenses/BSD-3-Clause) Author: <NAME> """ import getopt import logging import os import re import string import sys import unicode2ascii # Version string used by the what(1) and ident(1) commands: ID = "@(#) $Id: ngc - n-grams count v1.0.2 (September 26, 2021) by <NAME> $" # Default parameters. Can be superseded by command line options parameters = { "Convert": { "Unicode to ASCII": False, "Upper to lower case": False, "Lower to upper case": False, "Spaces to one space": False, }, "Discard": { "Unicode characters": False, "Upper case letters": False, "Lower case letters": False, "Connection symbols": False, # ' - "Digits": False, "Punctuation": False, # . , ; : ! ? "Other printable symbols": False, "Spaces": False, # space tab return formfeed vtab "Control characters": False, }, "Length": 1, "Fixed block": False, # Sliding-window mode by default "Word boundary": False, "Partial": { "Discard": False, "Keep": True, "Justify": False, }, "Show": { "Text": False, "N-grams": True, "Summary": False, }, } occurrences = {} summary = { "Upper case letters": 0, "Lower case letters": 0, "Connection symbols": 0, "Digits": 0, "Punctuation": 0, "Other printable symbols": 0, "Spaces": 0, "Other spaces": 0, "Control characters": 0, "Unicode letters": 0, "Unicode marks": 0, "Unicode numbers": 0, "Unicode punctuations": 0, "Unicode symbols": 0, "Unicode separators": 0, "Unicode others": 0, "All unicode characters": 0, "All characters": 0, "All n-grams": 0 } ################################################################################ def initialize_debugging(program_name): """Debugging set up""" console_log_format = program_name + ": %(levelname)s: %(message)s" logging.basicConfig(format=console_log_format, level=logging.DEBUG) logging.disable(logging.INFO) ################################################################################ def display_help(): """Displays usage and help""" 
print("usage: ngc [-b|--block] [-c|--convert ARGS] [--debug]", file=sys.stderr) print(" [-d|--discard ARGS] [--help|-?] [-l|--length ARG]", file=sys.stderr) print(" [-p|--partial ARG] [-q|--quiet] [-s|--summary] [-t|--text]", file=sys.stderr) print(" [--version] [-w|--word] [--] [filename ...]", file=sys.stderr) print(" ----------------- ----------------------------------------------------", file=sys.stderr ) print(" -b|--block Use fixed- instead of sliding-windows blocks", file=sys.stderr) print(" -c|--convert ARGS Convert text input. A combination of:", file=sys.stderr) print(" ARG = a - Unicode characters to ASCII (remove accents)", file=sys.stderr) print(" ARG = l - Upper case letters to lower", file=sys.stderr) print(" ARG = u - Lower case letters to upper", file=sys.stderr) print(" ARG = s - Spaces-like characters to 1 space", file=sys.stderr) print(" ARGS l and u can't be used at the same time", file=sys.stderr) print(" -d|--discard ARGS Discard characters. A combination of:", file=sys.stderr) print(" ARG = U - Unicode characters", file=sys.stderr) print(" ARG = u - Upper case letters", file=sys.stderr) print(" ARG = l - Lower case letters", file=sys.stderr) print(" ARG = L - All letters", file=sys.stderr) print(" ARG = c - Connection symbols ('-)", file=sys.stderr) print(" ARG = d - Digits", file=sys.stderr) print(" ARG = p - Punctuation (.,;:!?)", file=sys.stderr) print(" ARG = o - Other printable symbols", file=sys.stderr) print(" ARG = s - Spaces (space, tab, return, formfeed, vtab)", file=sys.stderr) print(" ARG = n - Non printable Control characters", file=sys.stderr) print(" -l|--length ARG Length of the n-gram. Defaults to 1", file=sys.stderr) print(" -p|--partial ARG What to do with partial blocks? 
One among:", file=sys.stderr) print(" ARG = d - Discard", file=sys.stderr) print(" ARG = k - Keep as-is", file=sys.stderr) print(" ARG = j - Keep but right-justify with spaces", file=sys.stderr) print(" -q|--quiet Don't show occurrences and frequency by n-gram", file=sys.stderr) print(" -s|--summary Show a summary of what was processed", file=sys.stderr) print(" -t|--text Show modified text input", file=sys.stderr) print(" -w|--word Respect Word boundaries (delimited by spaces)", file=sys.stderr) print(" --debug Enable debug mode", file=sys.stderr) print(" --help|-? Print usage and this help message and exit", file=sys.stderr) print(" --version Print version and exit", file=sys.stderr) print(" -- Options processing terminator", file=sys.stderr) print(file=sys.stderr) ################################################################################ def process_environment_variables(): """Process environment variables""" if "NGC_DEBUG" in os.environ.keys(): logging.disable(logging.NOTSET) ################################################################################ def process_command_line(): """Process command line""" # pylint: disable=C0103 global parameters # pylint: enable=C0103 # option letters followed by : expect an argument # same for option strings followed by = character_options = "bc:d:l:p:qstw?" 
string_options = [ "block", "convert=", "debug", "discard=", "help", "length=", "partial=", "quiet", "summary", "text", "version", "word", ] try: options, remaining_arguments = getopt.getopt( sys.argv[1:], character_options, string_options ) except getopt.GetoptError as error: logging.critical(error) display_help() sys.exit(1) for option, argument in options: if option in ("-b", "--block"): parameters["Fixed block"] = True elif option in ("-c", "--convert"): if 'l' in argument and 'u' in argument: logging.critical("-c|--convert parameter can't contain [lu] at the same time") sys.exit(1) if 'a' in argument: parameters["Convert"]["Unicode to ASCII"] = True if 'l' in argument: parameters["Convert"]["Upper to lower case"] = True if 'u' in argument: parameters["Convert"]["Lower to upper case"] = True if 's' in argument: parameters["Convert"]["Spaces to one space"] = True elif option in ("-d", "--discard"): if 'U' in argument: parameters["Discard"]["Unicode characters"] = True if 'u' in argument: parameters["Discard"]["Upper case letters"] = True if 'l' in argument: parameters["Discard"]["Lower case letters"] = True if 'L' in argument: parameters["Discard"]["Upper case letters"] = True parameters["Discard"]["Lower case letters"] = True if 'c' in argument: parameters["Discard"]["Connection symbols"] = True if 'd' in argument: parameters["Discard"]["Digits"] = True if 'p' in argument: parameters["Discard"]["Punctuation"] = True if 'o' in argument: parameters["Discard"]["Other printable symbols"] = True if 's' in argument: parameters["Discard"]["Spaces"] = True if 'n' in argument: parameters["Discard"]["Control characters"] = True elif option in ("-l", "--length"): if argument.isdigit() and int(argument) >= 0: parameters["Length"] = int(argument) else: logging.critical("-l|--length parameter must be a strictly positive integer") sys.exit(1) elif option in ("-p", "--partial"): if len(argument) > 1 or argument not in ('d', 'k', 'j'): logging.critical("-p|--partial parameter 
must be a single character among [dkj]") sys.exit(1) if argument == 'd': parameters["Partial"]["Discard"] = True parameters["Partial"]["Keep"] = False elif argument == 'j': parameters["Partial"]["Justify"] = True parameters["Partial"]["Keep"] = False elif option in ("-q", "--quiet"): parameters["Show"]["N-grams"] = False elif option in ("-s", "--summary"): parameters["Show"]["Summary"] = True elif option in ("-t", "--text"): parameters["Show"]["Text"] = True elif option in ("-w", "--word"): parameters["Word boundary"] = True elif option == "--debug": logging.disable(logging.NOTSET) elif option in ("--help", "-?"): display_help() sys.exit(0) elif option == "--version": print(ID.replace("@(" + "#)" + " $" + "Id" + ": ", "").replace(" $", "")) sys.exit(0) logging.debug("process_command_line(): parameters:") logging.debug(parameters) logging.debug("process_command_line(): remaining_arguments:") logging.debug(remaining_arguments) return remaining_arguments ################################################################################ def handle_partial_n_gram(text): """Analyze n-grams frequency in a string""" # pylint: disable=C0103 global occurrences, summary # pylint: enable=C0103 if not parameters["Partial"]["Discard"]: if parameters["Partial"]["Justify"]: for _ in range(parameters["Length"] - len(text)): text += " " if text in occurrences: occurrences[text] += 1 else: occurrences[text] = 1 summary["All n-grams"] += 1 ################################################################################ def frequency_analysis(text): """Analyze n-grams frequency in a string""" # pylint: disable=C0103 global occurrences, summary # pylint: enable=C0103 if parameters["Show"]["Summary"]: for character in text: if ord(character) < 128: if character in string.ascii_uppercase: summary["Upper case letters"] += 1 elif character in string.ascii_lowercase: summary["Lower case letters"] += 1 elif character in ("'", "-"): summary["Connection symbols"] += 1 elif character in 
string.digits: summary["Digits"] += 1 elif character in (".", ",", ";", ":", "!", "?"): summary["Punctuation"] += 1 elif character == " ": summary["Spaces"] += 1 elif character in string.whitespace: summary["Other spaces"] += 1 elif (ord(character) < 32 and ord(character) not in (9, 11, 12, 13)) \ or ord(character) == 127: summary["Control characters"] += 1 else: summary["Other printable symbols"] += 1 else: summary["All unicode characters"] += 1 if unicode2ascii.is_unicode_letter(character): summary["Unicode letters"] += 1 elif unicode2ascii.is_unicode_mark(character): summary["Unicode marks"] += 1 elif unicode2ascii.is_unicode_number(character): summary["Unicode numbers"] += 1 elif unicode2ascii.is_unicode_punctuation(character): summary["Unicode punctuations"] += 1 elif unicode2ascii.is_unicode_symbol(character): summary["Unicode symbols"] += 1 elif unicode2ascii.is_unicode_separator(character): summary["Unicode separators"] += 1 else: summary["Unicode others"] += 1 if len(text) <= parameters["Length"]: if text: handle_partial_n_gram(text) else: i = 0 while i < len(text) + 1 - parameters["Length"]: sequence = text[i:i + parameters["Length"]] if sequence in occurrences: occurrences[sequence] += 1 else: occurrences[sequence] = 1 summary["All n-grams"] += 1 if parameters["Fixed block"]: i += parameters["Length"] else: i += 1 if i < len(text): handle_partial_n_gram(text[i:]) ################################################################################ def process_line(line): """Process a text line""" # pylint: disable=C0103 global summary # pylint: enable=C0103 line = line.rstrip(os.linesep) # Conversions: if parameters["Convert"]["Unicode to ASCII"]: line = unicode2ascii.unicode_to_ascii_string(line) if parameters["Convert"]["Upper to lower case"]: line = line.lower() if parameters["Convert"]["Lower to upper case"]: line = line.upper() # Discards: if parameters["Discard"]["Unicode characters"]: line = "".join([c for c in line if ord(c) < 128]) if 
parameters["Discard"]["Upper case letters"]: line = re.sub(r"[A-Z]+", "", line) if parameters["Discard"]["Lower case letters"]: line = re.sub(r"[a-z]+", "", line) if parameters["Discard"]["Connection symbols"]: line = re.sub(r"[-']+", "", line) if parameters["Discard"]["Digits"]: line = re.sub(r"[0-9]+", "", line) if parameters["Discard"]["Punctuation"]: line = re.sub(r"[\.,;:!\?]+", "", line) if parameters["Discard"]["Other printable symbols"]: line = re.sub(r"[\"#$&@\[\\\]_`{|}~%()\*+/<=>^]+", "", line) if parameters["Discard"]["Spaces"]: line = re.sub(r"[" + string.whitespace + r"]+", "", line) if parameters["Discard"]["Control characters"]: line = "".join( [c for c in line if not (ord(c) < 9 or (ord(c) > 13 and ord(c) < 32) or ord(c) == 127)] ) # Late conversions: if parameters["Convert"]["Spaces to one space"]: line = re.sub(r"[" + string.whitespace + r"]+", " ", line) if parameters["Show"]["Text"]: print(line) if parameters["Word boundary"]: # Splitting words on all kind of whitespaces: for word in line.split(): if word: frequency_analysis(word) summary["All characters"] += len(word) else: frequency_analysis(line) summary["All characters"] += len(line) ################################################################################ def process_file(filename): """Process the file designated by filename, line by line""" with open(filename, "r") as file: for line in file.readlines(): process_line(line) ################################################################################ def compute_kappa_plaintext(): """Return kappa_plaintext for the processed input stream""" # pylint: disable=C0103 global occurrences, summary # pylint: enable=C0103 # See https://en.wikipedia.org/wiki/Index_of_coincidence index = 0.0 for occurrence in occurrences.values(): index += occurrence * (occurrence - 1) return index / (summary["All n-grams"] * (summary["All n-grams"] - 1)) ################################################################################ def 
compute_coincidence_index(kappa_plaintext): """Return coincidence index for a given kappa_plaintext and alphabet""" # pylint: disable=C0103 global summary # pylint: enable=C0103 if summary["Unicode separators"]: # Unknown alphabet size return 0 alphabet_size = 0 if summary["Upper case letters"]: alphabet_size += len(string.ascii_uppercase) if summary["Lower case letters"]: alphabet_size += len(string.ascii_lowercase) if summary["Digits"]: alphabet_size += len(string.digits) if summary["Connection symbols"]: alphabet_size += len("'-") if summary["Punctuation"]: alphabet_size += len(".,;:?!") if summary["Other printable symbols"]: alphabet_size += len("\"#$&@[\\]_`{|}~%()*+/<=>^") if summary["Spaces"]: alphabet_size += 1 if summary["Other spaces"]: alphabet_size += len(string.whitespace) - 1 if summary["Control characters"]: alphabet_size += 29 return kappa_plaintext * alphabet_size ################################################################################ def main(): """The program's main entry point""" program_name = os.path.basename(sys.argv[0]) initialize_debugging(program_name) process_environment_variables() arguments = process_command_line() exit_status = 0 # Reading from files whose name were given as arguments: if len(arguments): for filename in arguments: if os.path.isfile(filename): process_file(filename) else: logging.error("The argument '%s' is not a filename", filename) exit_status = 1 # Reading from standard input as there are no arguments: else: for line in sys.stdin: process_line(line) # Displaying occurrences and frequency by n-gram: if parameters["Show"]["N-grams"]: if parameters["Show"]["Text"]: print("--") decreasing_occurrences = dict(sorted(occurrences.items(), key=lambda t: t[1], reverse=True)) for key, value in decreasing_occurrences.items(): print("'{}'\t{}\t{:.2f}%".format(key, value, (value/summary["All n-grams"])*100)) # Displaying summary: if parameters["Show"]["Summary"]: print("==") for key, value in summary.items(): 
print("{:23s}\t{:d}".format(key, value)) print() kappa_plaintext = compute_kappa_plaintext() coincidence_index = compute_coincidence_index(kappa_plaintext) print("{:23s}\t{}".format("Kappa-plaintext", kappa_plaintext)) print("{:23s}\t{}".format("Index of coincidence", coincidence_index)) sys.exit(exit_status) if __name__ == "__main__": main()
de
0.409701
#!/usr/bin/env python ngc - n-grams count License: 3-clause BSD (see https://opensource.org/licenses/BSD-3-Clause) Author: <NAME> # Version string used by the what(1) and ident(1) commands: #) $Id: ngc - n-grams count v1.0.2 (September 26, 2021) by <NAME> $" # Default parameters. Can be superseded by command line options # ' - # . , ; : ! ? # space tab return formfeed vtab # Sliding-window mode by default ################################################################################ Debugging set up ################################################################################ Displays usage and help ################################################################################ Process environment variables ################################################################################ Process command line # pylint: disable=C0103 # pylint: enable=C0103 # option letters followed by : expect an argument # same for option strings followed by = ################################################################################ Analyze n-grams frequency in a string # pylint: disable=C0103 # pylint: enable=C0103 ################################################################################ Analyze n-grams frequency in a string # pylint: disable=C0103 # pylint: enable=C0103 ################################################################################ Process a text line # pylint: disable=C0103 # pylint: enable=C0103 # Conversions: # Discards: # Late conversions: # Splitting words on all kind of whitespaces: ################################################################################ Process the file designated by filename, line by line ################################################################################ Return kappa_plaintext for the processed input stream # pylint: disable=C0103 # pylint: enable=C0103 # See https://en.wikipedia.org/wiki/Index_of_coincidence ################################################################################ Return 
coincidence index for a given kappa_plaintext and alphabet # pylint: disable=C0103 # pylint: enable=C0103 # Unknown alphabet size ################################################################################ The program's main entry point # Reading from files whose name were given as arguments: # Reading from standard input as there are no arguments: # Displaying occurrences and frequency by n-gram: # Displaying summary:
2.275514
2
openquake.hazardlib/openquake/hazardlib/tests/gsim/campbell_2003_test.py
rainzhop/ConvNetQuake
0
9843
# -*- coding: utf-8 -*- # vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright (C) 2012-2016 GEM Foundation # # OpenQuake is free software: you can redistribute it and/or modify it # under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. from openquake.hazardlib.gsim.campbell_2003 import ( Campbell2003, Campbell2003SHARE, Campbell2003MblgAB1987NSHMP2008, Campbell2003MblgJ1996NSHMP2008, Campbell2003MwNSHMP2008 ) from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase import numpy # Test data generated from OpenSHA implementation. 
class Campbell2003TestCase(BaseGSIMTestCase): GSIM_CLASS = Campbell2003 def test_mean(self): self.check('C03/C03_MEAN.csv', max_discrep_percentage=0.1) def test_std_total(self): self.check('C03/C03_STD_TOTAL.csv', max_discrep_percentage=0.1) class Campbell2003SHARETestCase(BaseGSIMTestCase): GSIM_CLASS = Campbell2003SHARE def test_mean(self): self.check('C03/C03SHARE_MEAN.csv', max_discrep_percentage=0.1) def test_std_total(self): self.check('C03/C03SHARE_STD_TOTAL.csv', max_discrep_percentage=0.1) class Campbell2003MblgAB1987NSHMP2008TestCase(BaseGSIMTestCase): GSIM_CLASS = Campbell2003MblgAB1987NSHMP2008 # test data generated from ``subroutine getCampCEUS`` in ``hazgridXnga2.f`` def test_mean(self): self.check('C03/C03MblgAB1987NSHMP2008_MEAN.csv', max_discrep_percentage=0.1) def test_std_total(self): self.check('C03/C03MblgAB1987NSHMP2008_STD_TOTAL.csv', max_discrep_percentage=0.1) class Campbell2003MblgJ1996NSHMP2008TestCase(BaseGSIMTestCase): GSIM_CLASS = Campbell2003MblgJ1996NSHMP2008 # test data generated from ``subroutine getCampCEUS`` in ``hazgridXnga2.f`` def test_mean(self): self.check('C03/C03MblgJ1996NSHMP2008_MEAN.csv', max_discrep_percentage=0.1) def test_std_total(self): self.check('C03/C03MblgJ1996NSHMP2008_STD_TOTAL.csv', max_discrep_percentage=0.1) class Campbell2003MwNSHMP2008TestCase(BaseGSIMTestCase): GSIM_CLASS = Campbell2003MwNSHMP2008 # test data generated from ``subroutine getCampCEUS`` in ``hazgridXnga2.f`` def test_mean(self): self.check('C03/C03MwNSHMP2008_MEAN.csv', max_discrep_percentage=0.1) def test_std_total(self): self.check('C03/C03MwNSHMP2008_STD_TOTAL.csv', max_discrep_percentage=0.1)
# -*- coding: utf-8 -*- # vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright (C) 2012-2016 GEM Foundation # # OpenQuake is free software: you can redistribute it and/or modify it # under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. from openquake.hazardlib.gsim.campbell_2003 import ( Campbell2003, Campbell2003SHARE, Campbell2003MblgAB1987NSHMP2008, Campbell2003MblgJ1996NSHMP2008, Campbell2003MwNSHMP2008 ) from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase import numpy # Test data generated from OpenSHA implementation. 
class Campbell2003TestCase(BaseGSIMTestCase): GSIM_CLASS = Campbell2003 def test_mean(self): self.check('C03/C03_MEAN.csv', max_discrep_percentage=0.1) def test_std_total(self): self.check('C03/C03_STD_TOTAL.csv', max_discrep_percentage=0.1) class Campbell2003SHARETestCase(BaseGSIMTestCase): GSIM_CLASS = Campbell2003SHARE def test_mean(self): self.check('C03/C03SHARE_MEAN.csv', max_discrep_percentage=0.1) def test_std_total(self): self.check('C03/C03SHARE_STD_TOTAL.csv', max_discrep_percentage=0.1) class Campbell2003MblgAB1987NSHMP2008TestCase(BaseGSIMTestCase): GSIM_CLASS = Campbell2003MblgAB1987NSHMP2008 # test data generated from ``subroutine getCampCEUS`` in ``hazgridXnga2.f`` def test_mean(self): self.check('C03/C03MblgAB1987NSHMP2008_MEAN.csv', max_discrep_percentage=0.1) def test_std_total(self): self.check('C03/C03MblgAB1987NSHMP2008_STD_TOTAL.csv', max_discrep_percentage=0.1) class Campbell2003MblgJ1996NSHMP2008TestCase(BaseGSIMTestCase): GSIM_CLASS = Campbell2003MblgJ1996NSHMP2008 # test data generated from ``subroutine getCampCEUS`` in ``hazgridXnga2.f`` def test_mean(self): self.check('C03/C03MblgJ1996NSHMP2008_MEAN.csv', max_discrep_percentage=0.1) def test_std_total(self): self.check('C03/C03MblgJ1996NSHMP2008_STD_TOTAL.csv', max_discrep_percentage=0.1) class Campbell2003MwNSHMP2008TestCase(BaseGSIMTestCase): GSIM_CLASS = Campbell2003MwNSHMP2008 # test data generated from ``subroutine getCampCEUS`` in ``hazgridXnga2.f`` def test_mean(self): self.check('C03/C03MwNSHMP2008_MEAN.csv', max_discrep_percentage=0.1) def test_std_total(self): self.check('C03/C03MwNSHMP2008_STD_TOTAL.csv', max_discrep_percentage=0.1)
en
0.788075
# -*- coding: utf-8 -*- # vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright (C) 2012-2016 GEM Foundation # # OpenQuake is free software: you can redistribute it and/or modify it # under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. # Test data generated from OpenSHA implementation. # test data generated from ``subroutine getCampCEUS`` in ``hazgridXnga2.f`` # test data generated from ``subroutine getCampCEUS`` in ``hazgridXnga2.f`` # test data generated from ``subroutine getCampCEUS`` in ``hazgridXnga2.f``
1.822975
2
sktime/regression/interval_based/_tsf.py
khrapovs/sktime
1
9844
<gh_stars>1-10 # -*- coding: utf-8 -*- """Time Series Forest Regressor (TSF).""" __author__ = ["<NAME>", "kkoziara", "luiszugasti", "kanand77", "<NAME>"] __all__ = ["TimeSeriesForestRegressor"] import numpy as np from joblib import Parallel, delayed from sklearn.ensemble._forest import ForestRegressor from sklearn.tree import DecisionTreeRegressor from sktime.regression.base import BaseRegressor from sktime.series_as_features.base.estimators.interval_based._tsf import ( BaseTimeSeriesForest, _transform, ) class TimeSeriesForestRegressor(BaseTimeSeriesForest, ForestRegressor, BaseRegressor): """Time series forest regressor. A time series forest is an ensemble of decision trees built on random intervals. Overview: For input data with n series of length m, for each tree: - sample sqrt(m) intervals, - find mean, std and slope for each interval, concatenate to form new data set, - build decision tree on new data set. Ensemble the trees with averaged probability estimates. This implementation deviates from the original in minor ways. It samples intervals with replacement and does not use the splitting criteria tiny refinement described in [1]_. This is an intentionally stripped down, non configurable version for use as a HIVE-COTE component. Parameters ---------- n_estimators : int, default=200 Number of estimators. min_interval : int, default=3 Minimum width of an interval. n_jobs : int, default=1 The number of jobs to run in parallel for both `fit` and `predict`. ``-1`` means using all processors. random_state : int, default=None Attributes ---------- n_classes : int Number of classes. n_intervals : int Number of intervals. classes_ : list List of classes for a given problem. See Also -------- TimeSeriesForestClassifier References ---------- .. [1] H.Deng, G.Runger, E.Tuv and M.Vladimir, "A time series forest for classification and feature extraction", Information Sciences, 239, 2013 .. [2] Java implementation https://github.com/uea-machine-learning/tsml .. 
[3] Arxiv paper: https://arxiv.org/abs/1302.2277 """ _tags = { "capability:multivariate": False, "X_inner_mtype": "numpy3D", } _base_estimator = DecisionTreeRegressor() def fit(self, X, y): """Override sklearn forest fit with BaseRegressor fit.""" return BaseRegressor.fit(self, X, y) def _fit(self, X, y): """Wrap BaseForest._fit. This is a temporary measure prior to the BaseRegressor refactor. """ return BaseTimeSeriesForest._fit(self, X, y) def predict(self, X): """Override sklearn forest predict with BaseRegressor predict.""" return BaseRegressor.predict(self, X) def _predict(self, X): """Predict. Parameters ---------- X : pd.DataFrame or np.ndarray Panel data Returns ------- np.ndarray Predictions. """ X = X.squeeze(1) _, series_length = X.shape if series_length != self.series_length: raise TypeError( "The number of time points in the training data does not match " "that in the test data." ) y_pred = Parallel(n_jobs=self.n_jobs)( delayed(_predict)(X, self.estimators_[i], self.intervals_[i]) for i in range(self.n_estimators) ) return np.mean(y_pred, axis=0) def _predict(X, estimator, intervals): Xt = _transform(X, intervals) return estimator.predict(Xt)
# -*- coding: utf-8 -*- """Time Series Forest Regressor (TSF).""" __author__ = ["<NAME>", "kkoziara", "luiszugasti", "kanand77", "<NAME>"] __all__ = ["TimeSeriesForestRegressor"] import numpy as np from joblib import Parallel, delayed from sklearn.ensemble._forest import ForestRegressor from sklearn.tree import DecisionTreeRegressor from sktime.regression.base import BaseRegressor from sktime.series_as_features.base.estimators.interval_based._tsf import ( BaseTimeSeriesForest, _transform, ) class TimeSeriesForestRegressor(BaseTimeSeriesForest, ForestRegressor, BaseRegressor): """Time series forest regressor. A time series forest is an ensemble of decision trees built on random intervals. Overview: For input data with n series of length m, for each tree: - sample sqrt(m) intervals, - find mean, std and slope for each interval, concatenate to form new data set, - build decision tree on new data set. Ensemble the trees with averaged probability estimates. This implementation deviates from the original in minor ways. It samples intervals with replacement and does not use the splitting criteria tiny refinement described in [1]_. This is an intentionally stripped down, non configurable version for use as a HIVE-COTE component. Parameters ---------- n_estimators : int, default=200 Number of estimators. min_interval : int, default=3 Minimum width of an interval. n_jobs : int, default=1 The number of jobs to run in parallel for both `fit` and `predict`. ``-1`` means using all processors. random_state : int, default=None Attributes ---------- n_classes : int Number of classes. n_intervals : int Number of intervals. classes_ : list List of classes for a given problem. See Also -------- TimeSeriesForestClassifier References ---------- .. [1] H.Deng, G.Runger, E.Tuv and M.Vladimir, "A time series forest for classification and feature extraction", Information Sciences, 239, 2013 .. [2] Java implementation https://github.com/uea-machine-learning/tsml .. 
[3] Arxiv paper: https://arxiv.org/abs/1302.2277 """ _tags = { "capability:multivariate": False, "X_inner_mtype": "numpy3D", } _base_estimator = DecisionTreeRegressor() def fit(self, X, y): """Override sklearn forest fit with BaseRegressor fit.""" return BaseRegressor.fit(self, X, y) def _fit(self, X, y): """Wrap BaseForest._fit. This is a temporary measure prior to the BaseRegressor refactor. """ return BaseTimeSeriesForest._fit(self, X, y) def predict(self, X): """Override sklearn forest predict with BaseRegressor predict.""" return BaseRegressor.predict(self, X) def _predict(self, X): """Predict. Parameters ---------- X : pd.DataFrame or np.ndarray Panel data Returns ------- np.ndarray Predictions. """ X = X.squeeze(1) _, series_length = X.shape if series_length != self.series_length: raise TypeError( "The number of time points in the training data does not match " "that in the test data." ) y_pred = Parallel(n_jobs=self.n_jobs)( delayed(_predict)(X, self.estimators_[i], self.intervals_[i]) for i in range(self.n_estimators) ) return np.mean(y_pred, axis=0) def _predict(X, estimator, intervals): Xt = _transform(X, intervals) return estimator.predict(Xt)
en
0.714633
# -*- coding: utf-8 -*- Time Series Forest Regressor (TSF). Time series forest regressor. A time series forest is an ensemble of decision trees built on random intervals. Overview: For input data with n series of length m, for each tree: - sample sqrt(m) intervals, - find mean, std and slope for each interval, concatenate to form new data set, - build decision tree on new data set. Ensemble the trees with averaged probability estimates. This implementation deviates from the original in minor ways. It samples intervals with replacement and does not use the splitting criteria tiny refinement described in [1]_. This is an intentionally stripped down, non configurable version for use as a HIVE-COTE component. Parameters ---------- n_estimators : int, default=200 Number of estimators. min_interval : int, default=3 Minimum width of an interval. n_jobs : int, default=1 The number of jobs to run in parallel for both `fit` and `predict`. ``-1`` means using all processors. random_state : int, default=None Attributes ---------- n_classes : int Number of classes. n_intervals : int Number of intervals. classes_ : list List of classes for a given problem. See Also -------- TimeSeriesForestClassifier References ---------- .. [1] H.Deng, G.Runger, E.Tuv and M.Vladimir, "A time series forest for classification and feature extraction", Information Sciences, 239, 2013 .. [2] Java implementation https://github.com/uea-machine-learning/tsml .. [3] Arxiv paper: https://arxiv.org/abs/1302.2277 Override sklearn forest fit with BaseRegressor fit. Wrap BaseForest._fit. This is a temporary measure prior to the BaseRegressor refactor. Override sklearn forest predict with BaseRegressor predict. Predict. Parameters ---------- X : pd.DataFrame or np.ndarray Panel data Returns ------- np.ndarray Predictions.
3.01756
3
vectorc2/vectorc2/settings.py
sebastiankruk/vectorc2
11
9845
<filename>vectorc2/vectorc2/settings.py<gh_stars>10-100 """ Django settings for vectorc2 project. Copyright 2019 <NAME> <<EMAIL>> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '#9iml9@=i%x#i57qi1zm)&)p46hrf(g=pn7jioagsh*))6+z9(' # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True ALLOWED_HOSTS = [ "localhost", "127.0.0.1", "0.0.0.0" ] # Application definition INSTALLED_APPS = [ 'space', 'command', 'bootstrap4', 'octicons', 'nonicons', 'blocks', 'photos', 'morse', 'webview.apps.WebviewConfig', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'channels', # 'compressor', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'vectorc2.urls' STATICFILES_DIRS = [ os.path.join(BASE_DIR, "static") ] MEDIA_ROOT = os.path.join(BASE_DIR, 'media') MEDIA_URL = '/media/' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [ os.path.join(BASE_DIR, 'webview', 'templates') ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.debug', 'django.template.context_processors.i18n', 'django.template.context_processors.media', 'django.template.context_processors.static', 'django.template.context_processors.request', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'vectorc2.wsgi.application' # Database # https://docs.djangoproject.com/en/2.1/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.1/topics/i18n/ LANGUAGE_CODE = 'en' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_URL = '/static/' # languages from django.utils.translation import gettext_lazy as _ LANGUAGES = [ ('pl', _('Polish')), ('en', _('English')), ] # Default settings BOOTSTRAP4 = { # The complete URL to the Bootstrap CSS file # Note that a URL can be either a string, # e.g. "https://stackpath.bootstrapcdn.com/bootstrap/4.1.1/css/bootstrap.min.css", # or a dict like the default value below. "css_url": { "href": "/static/style/bootstrap/bootstrap.min.css", # "integrity": "<KEY>", "crossorigin": "anonymous", }, # The complete URL to the Bootstrap JavaScript file "javascript_url": { "url": "/static/script/bootstrap/bootstrap.min.js", # "integrity": "<KEY>", "crossorigin": "anonymous", }, # The complete URL to the Bootstrap CSS file (None means no theme) "theme_url": None, # The URL to the jQuery JavaScript file (full) "jquery_url": { "url": "/static/script/bootstrap/jquery-3.3.1.min.js", # "integrity": "<KEY>", "crossorigin": "anonymous", }, # The URL to the jQuery JavaScript file (slim) "jquery_slim_url": { "url": "/static/script/bootstrap/jquery-3.3.1.slim.min.js", # "integrity": "<KEY>", "crossorigin": "anonymous", }, # The URL to the Popper.js JavaScript file (slim) "popper_url": { "url": "/static/script/bootstrap/popper.min.js", # "integrity": "<KEY>", "crossorigin": "anonymous", }, # Put JavaScript in the HEAD section of the HTML document (only relevant if you use bootstrap4.html) 'javascript_in_head': False, # Include jQuery with Bootstrap JavaScript 
False|falsy|slim|full (default=False) # False - means tag bootstrap_javascript use default value - `falsy` and does not include jQuery) 'include_jquery': False, # Label class to use in horizontal forms 'horizontal_label_class': 'col-md-3', # Field class to use in horizontal forms 'horizontal_field_class': 'col-md-9', # Set placeholder attributes to label if no placeholder is provided 'set_placeholder': True, # Class to indicate required (better to set this in your Django form) 'required_css_class': '', # Class to indicate error (better to set this in your Django form) 'error_css_class': 'has-error', # Class to indicate success, meaning the field has valid input (better to set this in your Django form) 'success_css_class': 'has-success', # Renderers (only set these if you have studied the source and understand the inner workings) 'formset_renderers':{ 'default': 'bootstrap4.renderers.FormsetRenderer', }, 'form_renderers': { 'default': 'bootstrap4.renderers.FormRenderer', }, 'field_renderers': { 'default': 'bootstrap4.renderers.FieldRenderer', 'inline': 'bootstrap4.renderers.InlineFieldRenderer', }, } ASGI_APPLICATION = "vectorc2.routing.application" CHANNEL_LAYERS = { 'default': { 'BACKEND': 'channels_redis.core.RedisChannelLayer', 'CONFIG': { "hosts": [('127.0.0.1', 6379)], }, }, } VECTOR = { } # #TODO # STATICFILES_FINDERS = [ # 'compressor.finders.CompressorFinder' # ] # COMPRESS_ENABLED = False # COMPRESS_ROOT = os.path.join(BASE_DIR, 'static_collected')
<filename>vectorc2/vectorc2/settings.py<gh_stars>10-100 """ Django settings for vectorc2 project. Copyright 2019 <NAME> <<EMAIL>> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '#9iml9@=i%x#i57qi1zm)&)p46hrf(g=pn7jioagsh*))6+z9(' # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True ALLOWED_HOSTS = [ "localhost", "127.0.0.1", "0.0.0.0" ] # Application definition INSTALLED_APPS = [ 'space', 'command', 'bootstrap4', 'octicons', 'nonicons', 'blocks', 'photos', 'morse', 'webview.apps.WebviewConfig', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'channels', # 'compressor', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'vectorc2.urls' STATICFILES_DIRS = [ os.path.join(BASE_DIR, "static") ] MEDIA_ROOT = os.path.join(BASE_DIR, 'media') MEDIA_URL = '/media/' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [ os.path.join(BASE_DIR, 'webview', 'templates') ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.debug', 'django.template.context_processors.i18n', 'django.template.context_processors.media', 'django.template.context_processors.static', 'django.template.context_processors.request', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'vectorc2.wsgi.application' # Database # https://docs.djangoproject.com/en/2.1/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.1/topics/i18n/ LANGUAGE_CODE = 'en' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_URL = '/static/' # languages from django.utils.translation import gettext_lazy as _ LANGUAGES = [ ('pl', _('Polish')), ('en', _('English')), ] # Default settings BOOTSTRAP4 = { # The complete URL to the Bootstrap CSS file # Note that a URL can be either a string, # e.g. "https://stackpath.bootstrapcdn.com/bootstrap/4.1.1/css/bootstrap.min.css", # or a dict like the default value below. "css_url": { "href": "/static/style/bootstrap/bootstrap.min.css", # "integrity": "<KEY>", "crossorigin": "anonymous", }, # The complete URL to the Bootstrap JavaScript file "javascript_url": { "url": "/static/script/bootstrap/bootstrap.min.js", # "integrity": "<KEY>", "crossorigin": "anonymous", }, # The complete URL to the Bootstrap CSS file (None means no theme) "theme_url": None, # The URL to the jQuery JavaScript file (full) "jquery_url": { "url": "/static/script/bootstrap/jquery-3.3.1.min.js", # "integrity": "<KEY>", "crossorigin": "anonymous", }, # The URL to the jQuery JavaScript file (slim) "jquery_slim_url": { "url": "/static/script/bootstrap/jquery-3.3.1.slim.min.js", # "integrity": "<KEY>", "crossorigin": "anonymous", }, # The URL to the Popper.js JavaScript file (slim) "popper_url": { "url": "/static/script/bootstrap/popper.min.js", # "integrity": "<KEY>", "crossorigin": "anonymous", }, # Put JavaScript in the HEAD section of the HTML document (only relevant if you use bootstrap4.html) 'javascript_in_head': False, # Include jQuery with Bootstrap JavaScript 
False|falsy|slim|full (default=False) # False - means tag bootstrap_javascript use default value - `falsy` and does not include jQuery) 'include_jquery': False, # Label class to use in horizontal forms 'horizontal_label_class': 'col-md-3', # Field class to use in horizontal forms 'horizontal_field_class': 'col-md-9', # Set placeholder attributes to label if no placeholder is provided 'set_placeholder': True, # Class to indicate required (better to set this in your Django form) 'required_css_class': '', # Class to indicate error (better to set this in your Django form) 'error_css_class': 'has-error', # Class to indicate success, meaning the field has valid input (better to set this in your Django form) 'success_css_class': 'has-success', # Renderers (only set these if you have studied the source and understand the inner workings) 'formset_renderers':{ 'default': 'bootstrap4.renderers.FormsetRenderer', }, 'form_renderers': { 'default': 'bootstrap4.renderers.FormRenderer', }, 'field_renderers': { 'default': 'bootstrap4.renderers.FieldRenderer', 'inline': 'bootstrap4.renderers.InlineFieldRenderer', }, } ASGI_APPLICATION = "vectorc2.routing.application" CHANNEL_LAYERS = { 'default': { 'BACKEND': 'channels_redis.core.RedisChannelLayer', 'CONFIG': { "hosts": [('127.0.0.1', 6379)], }, }, } VECTOR = { } # #TODO # STATICFILES_FINDERS = [ # 'compressor.finders.CompressorFinder' # ] # COMPRESS_ENABLED = False # COMPRESS_ROOT = os.path.join(BASE_DIR, 'static_collected')
en
0.692262
Django settings for vectorc2 project. Copyright 2019 <NAME> <<EMAIL>> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. # Build paths inside the project like this: os.path.join(BASE_DIR, ...) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! #i57qi1zm)&)p46hrf(g=pn7jioagsh*))6+z9(' # SECURITY WARNING: don't run with debug turned on in production! # Application definition # 'compressor', # Database # https://docs.djangoproject.com/en/2.1/ref/settings/#databases # Password validation # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators # Internationalization # https://docs.djangoproject.com/en/2.1/topics/i18n/ # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ # languages # Default settings # The complete URL to the Bootstrap CSS file # Note that a URL can be either a string, # e.g. "https://stackpath.bootstrapcdn.com/bootstrap/4.1.1/css/bootstrap.min.css", # or a dict like the default value below. 
# "integrity": "<KEY>", # The complete URL to the Bootstrap JavaScript file # "integrity": "<KEY>", # The complete URL to the Bootstrap CSS file (None means no theme) # The URL to the jQuery JavaScript file (full) # "integrity": "<KEY>", # The URL to the jQuery JavaScript file (slim) # "integrity": "<KEY>", # The URL to the Popper.js JavaScript file (slim) # "integrity": "<KEY>", # Put JavaScript in the HEAD section of the HTML document (only relevant if you use bootstrap4.html) # Include jQuery with Bootstrap JavaScript False|falsy|slim|full (default=False) # False - means tag bootstrap_javascript use default value - `falsy` and does not include jQuery) # Label class to use in horizontal forms # Field class to use in horizontal forms # Set placeholder attributes to label if no placeholder is provided # Class to indicate required (better to set this in your Django form) # Class to indicate error (better to set this in your Django form) # Class to indicate success, meaning the field has valid input (better to set this in your Django form) # Renderers (only set these if you have studied the source and understand the inner workings) # #TODO # STATICFILES_FINDERS = [ # 'compressor.finders.CompressorFinder' # ] # COMPRESS_ENABLED = False # COMPRESS_ROOT = os.path.join(BASE_DIR, 'static_collected')
1.627935
2
datahandlers/wgisd.py
mikewoodson/ssl-transfer
0
9846
<filename>datahandlers/wgisd.py from torchvision.datasets.folder import pil_loader, accimage_loader, default_loader from torch import Tensor from pathlib import Path from enum import Enum from collections import namedtuple from torchvision import transforms as T import os import numpy as np import pdb import functools import torch.utils.data as data import torch class ConversionType(Enum): centerToVert = 1 def convert_bbox_format(boxes: Tensor, conversionType: int) -> Tensor: if conversionType > ConversionType.centerToVert.value: raise ValueError( f"conversionType must be less than" + "{ConversionType.centerToVert.value}, received {conversionType}") if conversionType == ConversionType.centerToVert.value: # convert box annotations from (Cx,Cy,W,H) to (X0,Y0,X1,Y1) box_centers = boxes[:, [0, 1, 0, 1]] box_wh = 0.5 * boxes[:, [2, 3, 2, 3]] box_wh[:, :2] *= -1 convertedBoxes = box_centers + box_wh else: raise ValueError return convertedBoxes class Wgisd(data.Dataset): """`FGVC-Aircraft <http://www.robots.ox.ac.uk/~vgg/data/fgvc-aircraft>`_ Dataset. Args: root (string): Root directory path to dataset. transform (callable, optional): A function/transform that takes in a PIL image and returns a transformed version. E.g. ``transforms.RandomCrop`` loader (callable, optional): A function to load an image given its path. download (bool, optional): If true, downloads the dataset from the internet and puts it in the root directory. If dataset is already downloaded, it is not downloaded again. """ url = 'https://github.com/thsant/wgisd.git' splits = ('train', 'test') def __init__(self, root, split='train', transform=None, loader=default_loader, download=False, val_size=0.2): if split not in self.splits: raise ValueError( 'Split "{}" not found. 
Valid splits are: {}'.format( split, ', '.join( self.splits), )) if val_size < 0 or val_size > 1: raise ValueError('val_size should be a fraction between 0 and 1') self.root = Path(root) self.split = split # There's no file specifying a validation dataset, so use a subset of the # training dataset dset_file = self.split self.classes_file = self.root / f'{dset_file}.txt' if download: self.download() self.transform = transform self.loader = loader self.id_to_fname = {} self.val_size = val_size self.total_set = None self.samples = None self.create_dataset() self.mode = 'test' if self.split == 'test' else 'train' @property def mode(self): return self._mode @mode.setter def mode(self, mode): if self.split == 'test': self._mode = 'test' self.partition_dset() return supported_modes = ['train', 'val', 'trainval'] if mode not in supported_modes: raise ValueError(f'mode must be one of {supported_modes}') self._mode = mode self.partition_dset() def create_dataset(self): image_names = [] samples = [] with open(self.classes_file, 'r') as f: for line in f: image_names.append(line.rstrip()) data_dir = self.root / 'data' # Read bbox annotations from file for idx, img_name in enumerate(image_names): target = {} gt_boxes = [] annotations = data_dir / f'{img_name}.txt' img_path = data_dir / f'{img_name}.jpg' with annotations.open() as f: for line in f: gt_boxes.append(line.split()[1:]) gt_np = np.array(gt_boxes, dtype=np.float32) gt_tensor = torch.as_tensor(gt_np, dtype=torch.float32) boxes = convert_bbox_format(gt_tensor, conversionType=1) img = self.loader(img_path) width, height = img.size boxes[:, [0, 2]] = boxes[:, [0, 2]] * width boxes[:, [1, 3]] = boxes[:, [1, 3]] * height boxes = boxes.to(dtype=torch.int32) numObjs = boxes.shape[0] labels = torch.ones((numObjs,), dtype=torch.int64) iscrowd = torch.zeros((numObjs,), dtype=torch.int64) image_id = torch.tensor([idx]) self.id_to_fname[image_id.item()] = img_path.parts[-1] area = (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - 
boxes[:, 0]) target['boxes'] = boxes target['labels'] = labels target['image_id'] = image_id target['area'] = area target['iscrowd'] = iscrowd samples.append((img_path, target)) self.total_set = samples def partition_dset(self): num_images = len(self.total_set) split = int(np.floor(self.val_size * num_images)) if self.mode == 'trainval': self.samples = self.total_set elif self.mode == 'train': self.samples = self.total_set[split:] elif self.mode == 'val': self.samples = self.total_set[:split] else: self.samples = self.total_set @functools.cached_property def mean(self): n_pixels = 0 pix_sum = torch.zeros([3]) for img_path, _ in self.total_set: img = self.loader(img_path) w,h = img.size im_tensor = T.ToTensor()(img) pix_sum += im_tensor.sum([1,2]) n_pixels += (w*h) pix_avg = pix_sum / n_pixels return pix_avg @functools.cached_property def stddev(self): avg = self.mean avg = avg.reshape([3, 1, 1]) var_sum = torch.zeros([3]) n_pixels = 0 for img_path, _ in self.total_set: img = self.loader(img_path) w,h = img.size im_tensor = T.ToTensor()(img) var_sum += ((im_tensor - avg)**2).sum([1,2]) n_pixels += (w*h) var = var_sum / n_pixels return torch.sqrt(var) def get_fname(self, img_id): return self.id_to_fname[img_id.item()] def __getitem__(self, index): """ Args: index (int): Index Returns: tuple: (sample, target) where target is class_index of the target class. 
""" path, target = self.samples[index] sample = self.loader(path) if self.transform is not None: sample, target = self.transform(sample, target) return sample, target def __len__(self): return len(self.samples) def __repr__(self): fmt_str = 'Dataset ' + self.__class__.__name__ + '\n' fmt_str += ' Number of datapoints: {}\n'.format(self.__len__()) fmt_str += ' Root Location: {}\n'.format(self.root) tmp = ' Transforms (if any): ' fmt_str += '{0}{1}\n'.format(tmp, self.transform.__repr__().replace('\n', '\n' + ' ' * len(tmp))) return fmt_str def _check_exists(self): return self.root.exists() and self.classes_file.exists() def download(self): """Download the wgisd data if it doesn't exist already.""" import requests import tarfile from git import Repo if self._check_exists(): return print('Downloading %s ... (may take a few minutes)' % self.url) self.root.mkdir() Repo.clone_from(self.url, str(self.root)) print('Done!')
<filename>datahandlers/wgisd.py from torchvision.datasets.folder import pil_loader, accimage_loader, default_loader from torch import Tensor from pathlib import Path from enum import Enum from collections import namedtuple from torchvision import transforms as T import os import numpy as np import pdb import functools import torch.utils.data as data import torch class ConversionType(Enum): centerToVert = 1 def convert_bbox_format(boxes: Tensor, conversionType: int) -> Tensor: if conversionType > ConversionType.centerToVert.value: raise ValueError( f"conversionType must be less than" + "{ConversionType.centerToVert.value}, received {conversionType}") if conversionType == ConversionType.centerToVert.value: # convert box annotations from (Cx,Cy,W,H) to (X0,Y0,X1,Y1) box_centers = boxes[:, [0, 1, 0, 1]] box_wh = 0.5 * boxes[:, [2, 3, 2, 3]] box_wh[:, :2] *= -1 convertedBoxes = box_centers + box_wh else: raise ValueError return convertedBoxes class Wgisd(data.Dataset): """`FGVC-Aircraft <http://www.robots.ox.ac.uk/~vgg/data/fgvc-aircraft>`_ Dataset. Args: root (string): Root directory path to dataset. transform (callable, optional): A function/transform that takes in a PIL image and returns a transformed version. E.g. ``transforms.RandomCrop`` loader (callable, optional): A function to load an image given its path. download (bool, optional): If true, downloads the dataset from the internet and puts it in the root directory. If dataset is already downloaded, it is not downloaded again. """ url = 'https://github.com/thsant/wgisd.git' splits = ('train', 'test') def __init__(self, root, split='train', transform=None, loader=default_loader, download=False, val_size=0.2): if split not in self.splits: raise ValueError( 'Split "{}" not found. 
Valid splits are: {}'.format( split, ', '.join( self.splits), )) if val_size < 0 or val_size > 1: raise ValueError('val_size should be a fraction between 0 and 1') self.root = Path(root) self.split = split # There's no file specifying a validation dataset, so use a subset of the # training dataset dset_file = self.split self.classes_file = self.root / f'{dset_file}.txt' if download: self.download() self.transform = transform self.loader = loader self.id_to_fname = {} self.val_size = val_size self.total_set = None self.samples = None self.create_dataset() self.mode = 'test' if self.split == 'test' else 'train' @property def mode(self): return self._mode @mode.setter def mode(self, mode): if self.split == 'test': self._mode = 'test' self.partition_dset() return supported_modes = ['train', 'val', 'trainval'] if mode not in supported_modes: raise ValueError(f'mode must be one of {supported_modes}') self._mode = mode self.partition_dset() def create_dataset(self): image_names = [] samples = [] with open(self.classes_file, 'r') as f: for line in f: image_names.append(line.rstrip()) data_dir = self.root / 'data' # Read bbox annotations from file for idx, img_name in enumerate(image_names): target = {} gt_boxes = [] annotations = data_dir / f'{img_name}.txt' img_path = data_dir / f'{img_name}.jpg' with annotations.open() as f: for line in f: gt_boxes.append(line.split()[1:]) gt_np = np.array(gt_boxes, dtype=np.float32) gt_tensor = torch.as_tensor(gt_np, dtype=torch.float32) boxes = convert_bbox_format(gt_tensor, conversionType=1) img = self.loader(img_path) width, height = img.size boxes[:, [0, 2]] = boxes[:, [0, 2]] * width boxes[:, [1, 3]] = boxes[:, [1, 3]] * height boxes = boxes.to(dtype=torch.int32) numObjs = boxes.shape[0] labels = torch.ones((numObjs,), dtype=torch.int64) iscrowd = torch.zeros((numObjs,), dtype=torch.int64) image_id = torch.tensor([idx]) self.id_to_fname[image_id.item()] = img_path.parts[-1] area = (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - 
boxes[:, 0]) target['boxes'] = boxes target['labels'] = labels target['image_id'] = image_id target['area'] = area target['iscrowd'] = iscrowd samples.append((img_path, target)) self.total_set = samples def partition_dset(self): num_images = len(self.total_set) split = int(np.floor(self.val_size * num_images)) if self.mode == 'trainval': self.samples = self.total_set elif self.mode == 'train': self.samples = self.total_set[split:] elif self.mode == 'val': self.samples = self.total_set[:split] else: self.samples = self.total_set @functools.cached_property def mean(self): n_pixels = 0 pix_sum = torch.zeros([3]) for img_path, _ in self.total_set: img = self.loader(img_path) w,h = img.size im_tensor = T.ToTensor()(img) pix_sum += im_tensor.sum([1,2]) n_pixels += (w*h) pix_avg = pix_sum / n_pixels return pix_avg @functools.cached_property def stddev(self): avg = self.mean avg = avg.reshape([3, 1, 1]) var_sum = torch.zeros([3]) n_pixels = 0 for img_path, _ in self.total_set: img = self.loader(img_path) w,h = img.size im_tensor = T.ToTensor()(img) var_sum += ((im_tensor - avg)**2).sum([1,2]) n_pixels += (w*h) var = var_sum / n_pixels return torch.sqrt(var) def get_fname(self, img_id): return self.id_to_fname[img_id.item()] def __getitem__(self, index): """ Args: index (int): Index Returns: tuple: (sample, target) where target is class_index of the target class. 
""" path, target = self.samples[index] sample = self.loader(path) if self.transform is not None: sample, target = self.transform(sample, target) return sample, target def __len__(self): return len(self.samples) def __repr__(self): fmt_str = 'Dataset ' + self.__class__.__name__ + '\n' fmt_str += ' Number of datapoints: {}\n'.format(self.__len__()) fmt_str += ' Root Location: {}\n'.format(self.root) tmp = ' Transforms (if any): ' fmt_str += '{0}{1}\n'.format(tmp, self.transform.__repr__().replace('\n', '\n' + ' ' * len(tmp))) return fmt_str def _check_exists(self): return self.root.exists() and self.classes_file.exists() def download(self): """Download the wgisd data if it doesn't exist already.""" import requests import tarfile from git import Repo if self._check_exists(): return print('Downloading %s ... (may take a few minutes)' % self.url) self.root.mkdir() Repo.clone_from(self.url, str(self.root)) print('Done!')
en
0.738481
# convert box annotations from (Cx,Cy,W,H) to (X0,Y0,X1,Y1) `FGVC-Aircraft <http://www.robots.ox.ac.uk/~vgg/data/fgvc-aircraft>`_ Dataset. Args: root (string): Root directory path to dataset. transform (callable, optional): A function/transform that takes in a PIL image and returns a transformed version. E.g. ``transforms.RandomCrop`` loader (callable, optional): A function to load an image given its path. download (bool, optional): If true, downloads the dataset from the internet and puts it in the root directory. If dataset is already downloaded, it is not downloaded again. # There's no file specifying a validation dataset, so use a subset of the # training dataset # Read bbox annotations from file Args: index (int): Index Returns: tuple: (sample, target) where target is class_index of the target class. Download the wgisd data if it doesn't exist already.
2.637414
3
SimpleCV/MachineLearning/query_imgs/get_imgs_geo_gps_search.py
nikhilgk/SimpleCV
2
9847
<reponame>nikhilgk/SimpleCV<filename>SimpleCV/MachineLearning/query_imgs/get_imgs_geo_gps_search.py #!/usr/bin/python # # So this script is in a bit of a hack state right now. # This script reads # # # # Graciously copied and modified from: # http://graphics.cs.cmu.edu/projects/im2gps/flickr_code.html #Image querying script written by <NAME>, #and extended heavily James Hays #9/26/2007 added dynamic timeslices to query more efficiently. #8/18/2008 added new fields and set maximum time slice. #8/19/2008 this is a much simpler function which gets ALL geotagged photos of # sufficient accuracy. No queries, no negative constraints. # divides up the query results into multiple files # 1/5/2009 # now uses date_taken instead of date_upload to get more diverse blocks of images # 1/13/2009 - uses the original im2gps keywords, not as negative constraints though import sys, string, math, time, socket from flickrapi2 import FlickrAPI from datetime import datetime import pycurl import os import shutil socket.setdefaulttimeout(30) #30 second time out on sockets before they throw #an exception. I've been having trouble with urllib.urlopen hanging in the #flickr API. This will show up as exceptions.IOError. #the time out needs to be pretty long, it seems, because the flickr servers can be slow #to respond to our big searches. 
#returns a query and the search times to attempt to get a desired number of photos #this needs serious refactoring -KAS def DoSearch(fapi,query_string,desired_photos): # number of seconds to skip per query #timeskip = 62899200 #two years #timeskip = 604800 #one week timeskip = 172800 #two days #timeskip = 86400 #one day #timeskip = 3600 #one hour #timeskip = 2257 #for resuming previous query #mintime = 1121832000 #from im2gps #mintime = 1167407788 # resume crash england #mintime = 1177828976 #resume crash japan #mintime = 1187753798 #resume crash greece mintime = 1171416400 #resume crash WashingtonDC maxtime = mintime+timeskip endtime = 1192165200 #10/12/2007, at the end of im2gps queries print datetime.fromtimestamp(mintime) print datetime.fromtimestamp(endtime) while (maxtime < endtime): #new approach - adjust maxtime until we get the desired number of images #within a block. We'll need to keep upper bounds and lower #lower bound is well defined (mintime), but upper bound is not. We can't #search all the way from endtime. lower_bound = mintime + 900 #lower bound OF the upper time limit. must be at least 15 minutes or zero results upper_bound = mintime + timeskip * 20 #upper bound of the upper time limit maxtime = .95 * lower_bound + .05 * upper_bound print '\nBinary search on time range upper bound' print 'Lower bound is ' + str(datetime.fromtimestamp(lower_bound)) print 'Upper bound is ' + str(datetime.fromtimestamp(upper_bound)) keep_going = 6 #search stops after a fixed number of iterations while( keep_going > 0 and maxtime < endtime): try: rsp = fapi.photos_search(api_key=flickrAPIKey, ispublic="1", media="photos", per_page="250", page="1", has_geo = "0", #bbox="-180, -90, 180, 90", text=query_string, accuracy="6", #6 is region level. min_upload_date=str(mintime), max_upload_date=str(maxtime)) #we want to catch these failures somehow and keep going. 
time.sleep(1) fapi.testFailure(rsp) total_images = rsp.photos[0]['total']; null_test = int(total_images); #want to make sure this won't crash later on for some reason null_test = float(total_images); print '\nnumimgs: ' + total_images print 'mintime: ' + str(mintime) + ' maxtime: ' + str(maxtime) + ' timeskip: ' + str(maxtime - mintime) if( int(total_images) > desired_photos ): print 'too many photos in block, reducing maxtime' upper_bound = maxtime maxtime = (lower_bound + maxtime) / 2 #midpoint between current value and lower bound. if( int(total_images) < desired_photos): print 'too few photos in block, increasing maxtime' lower_bound = maxtime maxtime = (upper_bound + maxtime) / 2 print 'Lower bound is ' + str(datetime.fromtimestamp(lower_bound)) print 'Upper bound is ' + str(datetime.fromtimestamp(upper_bound)) if( int(total_images) > 0): #only if we're not in a degenerate case keep_going = keep_going - 1 else: upper_bound = upper_bound + timeskip; except KeyboardInterrupt: print('Keyboard exception while querying for images, exiting\n') raise except: print sys.exc_info()[0] #print type(inst) # the exception instance #print inst.args # arguments stored in .args #print inst # __str__ allows args to printed directly print ('Exception encountered while querying for images\n') #end of while binary search print 'finished binary search' return([mintime,maxtime,total_images,rsp]) ########################################################################### # Modify this section to reflect your data and specific search ########################################################################### # flickr auth information: # change these to your flickr api keys and secret flickrAPIKey = "<KEY>" # API key flickrSecret = "<KEY>" # shared "secret" rootpath = "../data/" #where do you want the data desired_photos = 1000 #how many photos do you want to try and get query_file_name = 'query.dat' #The file to get the queries from #query_file_name = 'place_rec_queries_fall08.txt' 
query_file = open(query_file_name, 'r') #aggregate all of the positive and negative queries together. pos_queries = [] #an empty list neg_queries = '' #a string num_queries = 0 for line in query_file: if line[0] != '#' and len(line) > 1: #line end character is 2 long? print line[0:len(line)-1] if line[0] != '-': pos_queries = pos_queries + [line[0:len(line)-1]] num_queries = num_queries + 1 if line[0] == '-': neg_queries = neg_queries + ' ' + line[0:len(line)-1] query_file.close() print 'positive queries: ' print pos_queries print 'negative queries: ' + neg_queries print 'num_queries = ' + str(num_queries) #this is the desired number of photos in each block # make a new FlickrAPI instance fapi = FlickrAPI(flickrAPIKey, flickrSecret) for current_tag in range(0, num_queries): print('TOP OF LOOP') # change this to the location where you want to put your output file try: stats = os.stat(rootpath) except OSError: os.mkdir(rootpath) outpath = rootpath+pos_queries[current_tag]+'/' try: os.mkdir(outpath) except OSError: shutil.rmtree(outpath,True) os.mkdir(outpath) out_file = open(rootpath + pos_queries[current_tag] + '.txt','w') ########################################################################### #form the query string. query_string = pos_queries[current_tag] + ' ' + neg_queries print '\n\nquery_string is ' + query_string total_images_queried = 0; [mintime,maxtime,total_images,rsp] = DoSearch(fapi,query_string,desired_photos) print('GETTING TOTATL IMAGES:'+str(total_images)) s = '\nmintime: ' + str(mintime) + ' maxtime: ' + str(maxtime) print s out_file.write(s + '\n') i = getattr(rsp,'photos',None) if i: s = 'numimgs: ' + total_images print s out_file.write(s + '\n') current_image_num = 1; num = 4 # CHANGE THIS BACK int(rsp.photos[0]['pages']) s = 'total pages: ' + str(num) print s out_file.write(s + '\n') #only visit 16 pages max, to try and avoid the dreaded duplicate bug #16 pages = 4000 images, should be duplicate safe. Most interesting pictures will be taken. 
num_visit_pages = min(16,num) s = 'visiting only ' + str(num_visit_pages) + ' pages ( up to ' + str(num_visit_pages * 250) + ' images)' print s out_file.write(s + '\n') total_images_queried = total_images_queried + min((num_visit_pages * 250), int(total_images)) #print 'stopping before page ' + str(int(math.ceil(num/3) + 1)) + '\n' pagenum = 1; counter = -1 while( pagenum <= num_visit_pages ): #for pagenum in range(1, num_visit_pages + 1): #page one is searched twice print ' page number ' + str(pagenum) try: print("PAGE") print(pagenum) # WARNING THIS QUERY HAS TO MATCH THE SEARCH QUERY!!!! rsp = fapi.photos_search(api_key=flickrAPIKey, ispublic="1", media="photos", per_page="250", page=str(pagenum), has_geo = "0", text=query_string, #extras = "tags, original_format, license, geo, date_taken, date_upload, o_dims, views", #accuracy="6", #6 is region level. min_upload_date=str(1121832000),#mintime), max_upload_date=str(1192165200))#maxtime)) #rsp = fapi.photos_search(api_key=flickrAPIKey, # ispublic="1", # media="photos", # per_page="250", # page='0', #str(pagenum), # sort="interestingness-desc", # has_geo = "0", #bbox="-180, -90, 180, 90", # text=query_string, # #accuracy="6", #6 is region level. most things seem 10 or better. 
# extras = "tags, original_format, license, geo, date_taken, date_upload, o_dims, views", # min_upload_date=str(mintime), # max_upload_date=str(maxtime)) ##min_taken_date=str(datetime.fromtimestamp(mintime)), ##max_taken_date=str(datetime.fromtimestamp(maxtime))) time.sleep(1) fapi.testFailure(rsp) except KeyboardInterrupt: print('Keyboard exception while querying for images, exiting\n') raise except: print sys.exc_info()[0] #print type(inst) # the exception instance #print inst.args # arguments stored in .args #print inst # __str__ allows args to printed directly print ('Exception encountered while querying for images\n') else: print('got a response') # and print them k = getattr(rsp,'photos',None) if k: print('In K') m = getattr(rsp.photos[0],'photo',None) if m: print('In M') for b in rsp.photos[0].photo: print('In b') if b!=None: counter = counter + 1 ##print(http://farm{farm-id}.static.flickr.com/{server-id}/{id}_{secret}.jpg) myurl = 'http://farm'+b['farm']+".static.flickr.com/"+b['server']+"/"+b['id']+"_"+b['secret']+'.jpg' fname = outpath+pos_queries[current_tag]+str(counter)+'.jpg' #b['id']+"_"+b['secret']+'.jpg' print(myurl) print(fname) mycurl = pycurl.Curl() mycurl.setopt(pycurl.URL, str(myurl)) myfile = open(fname,"wb") mycurl.setopt(pycurl.WRITEDATA, myfile) mycurl.setopt(pycurl.FOLLOWLOCATION, 1) mycurl.setopt(pycurl.MAXREDIRS, 5) mycurl.setopt(pycurl.NOSIGNAL, 1) mycurl.perform() mycurl.close() myfile.close() out_file.write('URL: '+myurl+'\n') out_file.write('File: '+ fname+'\n') out_file.write('photo: ' + b['id'] + ' ' + b['secret'] + ' ' + b['server'] + '\n') out_file.write('owner: ' + b['owner'] + '\n') out_file.write('title: ' + b['title'].encode("ascii","replace") + '\n') out_file.write('originalsecret: ' + b['originalsecret'] + '\n') out_file.write('originalformat: ' + b['originalformat'] + '\n') out_file.write('o_height: ' + b['o_height'] + '\n') out_file.write('o_width: ' + b['o_width'] + '\n') out_file.write('datetaken: ' + 
b['datetaken'].encode("ascii","replace") + '\n') out_file.write('dateupload: ' + b['dateupload'].encode("ascii","replace") + '\n') out_file.write('tags: ' + b['tags'].encode("ascii","replace") + '\n') out_file.write('license: ' + b['license'].encode("ascii","replace") + '\n') out_file.write('latitude: ' + b['latitude'].encode("ascii","replace") + '\n') out_file.write('longitude: ' + b['longitude'].encode("ascii","replace") + '\n') out_file.write('accuracy: ' + b['accuracy'].encode("ascii","replace") + '\n') out_file.write('views: ' + b['views'] + '\n') out_file.write('interestingness: ' + str(current_image_num) + ' out of ' + str(total_images) + '\n'); out_file.write('\n') current_image_num = current_image_num + 1; print('') pagenum = pagenum + 1; #this is in the else exception block. Itwon't increment for a failure. #this block is indented such that it will only run if there are no exceptions #in the original query. That means if there are exceptions, mintime won't be incremented #and it will try again timeskip = maxtime - mintime #used for initializing next binary search mintime = maxtime out_file.write('Total images queried: ' + str(total_images_queried) + '\n') out_file.close
#!/usr/bin/python # # So this script is in a bit of a hack state right now. # This script reads # # # # Graciously copied and modified from: # http://graphics.cs.cmu.edu/projects/im2gps/flickr_code.html #Image querying script written by <NAME>, #and extended heavily James Hays #9/26/2007 added dynamic timeslices to query more efficiently. #8/18/2008 added new fields and set maximum time slice. #8/19/2008 this is a much simpler function which gets ALL geotagged photos of # sufficient accuracy. No queries, no negative constraints. # divides up the query results into multiple files # 1/5/2009 # now uses date_taken instead of date_upload to get more diverse blocks of images # 1/13/2009 - uses the original im2gps keywords, not as negative constraints though import sys, string, math, time, socket from flickrapi2 import FlickrAPI from datetime import datetime import pycurl import os import shutil socket.setdefaulttimeout(30) #30 second time out on sockets before they throw #an exception. I've been having trouble with urllib.urlopen hanging in the #flickr API. This will show up as exceptions.IOError. #the time out needs to be pretty long, it seems, because the flickr servers can be slow #to respond to our big searches. 
#returns a query and the search times to attempt to get a desired number of photos #this needs serious refactoring -KAS def DoSearch(fapi,query_string,desired_photos): # number of seconds to skip per query #timeskip = 62899200 #two years #timeskip = 604800 #one week timeskip = 172800 #two days #timeskip = 86400 #one day #timeskip = 3600 #one hour #timeskip = 2257 #for resuming previous query #mintime = 1121832000 #from im2gps #mintime = 1167407788 # resume crash england #mintime = 1177828976 #resume crash japan #mintime = 1187753798 #resume crash greece mintime = 1171416400 #resume crash WashingtonDC maxtime = mintime+timeskip endtime = 1192165200 #10/12/2007, at the end of im2gps queries print datetime.fromtimestamp(mintime) print datetime.fromtimestamp(endtime) while (maxtime < endtime): #new approach - adjust maxtime until we get the desired number of images #within a block. We'll need to keep upper bounds and lower #lower bound is well defined (mintime), but upper bound is not. We can't #search all the way from endtime. lower_bound = mintime + 900 #lower bound OF the upper time limit. must be at least 15 minutes or zero results upper_bound = mintime + timeskip * 20 #upper bound of the upper time limit maxtime = .95 * lower_bound + .05 * upper_bound print '\nBinary search on time range upper bound' print 'Lower bound is ' + str(datetime.fromtimestamp(lower_bound)) print 'Upper bound is ' + str(datetime.fromtimestamp(upper_bound)) keep_going = 6 #search stops after a fixed number of iterations while( keep_going > 0 and maxtime < endtime): try: rsp = fapi.photos_search(api_key=flickrAPIKey, ispublic="1", media="photos", per_page="250", page="1", has_geo = "0", #bbox="-180, -90, 180, 90", text=query_string, accuracy="6", #6 is region level. min_upload_date=str(mintime), max_upload_date=str(maxtime)) #we want to catch these failures somehow and keep going. 
time.sleep(1) fapi.testFailure(rsp) total_images = rsp.photos[0]['total']; null_test = int(total_images); #want to make sure this won't crash later on for some reason null_test = float(total_images); print '\nnumimgs: ' + total_images print 'mintime: ' + str(mintime) + ' maxtime: ' + str(maxtime) + ' timeskip: ' + str(maxtime - mintime) if( int(total_images) > desired_photos ): print 'too many photos in block, reducing maxtime' upper_bound = maxtime maxtime = (lower_bound + maxtime) / 2 #midpoint between current value and lower bound. if( int(total_images) < desired_photos): print 'too few photos in block, increasing maxtime' lower_bound = maxtime maxtime = (upper_bound + maxtime) / 2 print 'Lower bound is ' + str(datetime.fromtimestamp(lower_bound)) print 'Upper bound is ' + str(datetime.fromtimestamp(upper_bound)) if( int(total_images) > 0): #only if we're not in a degenerate case keep_going = keep_going - 1 else: upper_bound = upper_bound + timeskip; except KeyboardInterrupt: print('Keyboard exception while querying for images, exiting\n') raise except: print sys.exc_info()[0] #print type(inst) # the exception instance #print inst.args # arguments stored in .args #print inst # __str__ allows args to printed directly print ('Exception encountered while querying for images\n') #end of while binary search print 'finished binary search' return([mintime,maxtime,total_images,rsp]) ########################################################################### # Modify this section to reflect your data and specific search ########################################################################### # flickr auth information: # change these to your flickr api keys and secret flickrAPIKey = "<KEY>" # API key flickrSecret = "<KEY>" # shared "secret" rootpath = "../data/" #where do you want the data desired_photos = 1000 #how many photos do you want to try and get query_file_name = 'query.dat' #The file to get the queries from #query_file_name = 'place_rec_queries_fall08.txt' 
query_file = open(query_file_name, 'r') #aggregate all of the positive and negative queries together. pos_queries = [] #an empty list neg_queries = '' #a string num_queries = 0 for line in query_file: if line[0] != '#' and len(line) > 1: #line end character is 2 long? print line[0:len(line)-1] if line[0] != '-': pos_queries = pos_queries + [line[0:len(line)-1]] num_queries = num_queries + 1 if line[0] == '-': neg_queries = neg_queries + ' ' + line[0:len(line)-1] query_file.close() print 'positive queries: ' print pos_queries print 'negative queries: ' + neg_queries print 'num_queries = ' + str(num_queries) #this is the desired number of photos in each block # make a new FlickrAPI instance fapi = FlickrAPI(flickrAPIKey, flickrSecret) for current_tag in range(0, num_queries): print('TOP OF LOOP') # change this to the location where you want to put your output file try: stats = os.stat(rootpath) except OSError: os.mkdir(rootpath) outpath = rootpath+pos_queries[current_tag]+'/' try: os.mkdir(outpath) except OSError: shutil.rmtree(outpath,True) os.mkdir(outpath) out_file = open(rootpath + pos_queries[current_tag] + '.txt','w') ########################################################################### #form the query string. query_string = pos_queries[current_tag] + ' ' + neg_queries print '\n\nquery_string is ' + query_string total_images_queried = 0; [mintime,maxtime,total_images,rsp] = DoSearch(fapi,query_string,desired_photos) print('GETTING TOTATL IMAGES:'+str(total_images)) s = '\nmintime: ' + str(mintime) + ' maxtime: ' + str(maxtime) print s out_file.write(s + '\n') i = getattr(rsp,'photos',None) if i: s = 'numimgs: ' + total_images print s out_file.write(s + '\n') current_image_num = 1; num = 4 # CHANGE THIS BACK int(rsp.photos[0]['pages']) s = 'total pages: ' + str(num) print s out_file.write(s + '\n') #only visit 16 pages max, to try and avoid the dreaded duplicate bug #16 pages = 4000 images, should be duplicate safe. Most interesting pictures will be taken. 
num_visit_pages = min(16,num) s = 'visiting only ' + str(num_visit_pages) + ' pages ( up to ' + str(num_visit_pages * 250) + ' images)' print s out_file.write(s + '\n') total_images_queried = total_images_queried + min((num_visit_pages * 250), int(total_images)) #print 'stopping before page ' + str(int(math.ceil(num/3) + 1)) + '\n' pagenum = 1; counter = -1 while( pagenum <= num_visit_pages ): #for pagenum in range(1, num_visit_pages + 1): #page one is searched twice print ' page number ' + str(pagenum) try: print("PAGE") print(pagenum) # WARNING THIS QUERY HAS TO MATCH THE SEARCH QUERY!!!! rsp = fapi.photos_search(api_key=flickrAPIKey, ispublic="1", media="photos", per_page="250", page=str(pagenum), has_geo = "0", text=query_string, #extras = "tags, original_format, license, geo, date_taken, date_upload, o_dims, views", #accuracy="6", #6 is region level. min_upload_date=str(1121832000),#mintime), max_upload_date=str(1192165200))#maxtime)) #rsp = fapi.photos_search(api_key=flickrAPIKey, # ispublic="1", # media="photos", # per_page="250", # page='0', #str(pagenum), # sort="interestingness-desc", # has_geo = "0", #bbox="-180, -90, 180, 90", # text=query_string, # #accuracy="6", #6 is region level. most things seem 10 or better. 
# extras = "tags, original_format, license, geo, date_taken, date_upload, o_dims, views", # min_upload_date=str(mintime), # max_upload_date=str(maxtime)) ##min_taken_date=str(datetime.fromtimestamp(mintime)), ##max_taken_date=str(datetime.fromtimestamp(maxtime))) time.sleep(1) fapi.testFailure(rsp) except KeyboardInterrupt: print('Keyboard exception while querying for images, exiting\n') raise except: print sys.exc_info()[0] #print type(inst) # the exception instance #print inst.args # arguments stored in .args #print inst # __str__ allows args to printed directly print ('Exception encountered while querying for images\n') else: print('got a response') # and print them k = getattr(rsp,'photos',None) if k: print('In K') m = getattr(rsp.photos[0],'photo',None) if m: print('In M') for b in rsp.photos[0].photo: print('In b') if b!=None: counter = counter + 1 ##print(http://farm{farm-id}.static.flickr.com/{server-id}/{id}_{secret}.jpg) myurl = 'http://farm'+b['farm']+".static.flickr.com/"+b['server']+"/"+b['id']+"_"+b['secret']+'.jpg' fname = outpath+pos_queries[current_tag]+str(counter)+'.jpg' #b['id']+"_"+b['secret']+'.jpg' print(myurl) print(fname) mycurl = pycurl.Curl() mycurl.setopt(pycurl.URL, str(myurl)) myfile = open(fname,"wb") mycurl.setopt(pycurl.WRITEDATA, myfile) mycurl.setopt(pycurl.FOLLOWLOCATION, 1) mycurl.setopt(pycurl.MAXREDIRS, 5) mycurl.setopt(pycurl.NOSIGNAL, 1) mycurl.perform() mycurl.close() myfile.close() out_file.write('URL: '+myurl+'\n') out_file.write('File: '+ fname+'\n') out_file.write('photo: ' + b['id'] + ' ' + b['secret'] + ' ' + b['server'] + '\n') out_file.write('owner: ' + b['owner'] + '\n') out_file.write('title: ' + b['title'].encode("ascii","replace") + '\n') out_file.write('originalsecret: ' + b['originalsecret'] + '\n') out_file.write('originalformat: ' + b['originalformat'] + '\n') out_file.write('o_height: ' + b['o_height'] + '\n') out_file.write('o_width: ' + b['o_width'] + '\n') out_file.write('datetaken: ' + 
b['datetaken'].encode("ascii","replace") + '\n') out_file.write('dateupload: ' + b['dateupload'].encode("ascii","replace") + '\n') out_file.write('tags: ' + b['tags'].encode("ascii","replace") + '\n') out_file.write('license: ' + b['license'].encode("ascii","replace") + '\n') out_file.write('latitude: ' + b['latitude'].encode("ascii","replace") + '\n') out_file.write('longitude: ' + b['longitude'].encode("ascii","replace") + '\n') out_file.write('accuracy: ' + b['accuracy'].encode("ascii","replace") + '\n') out_file.write('views: ' + b['views'] + '\n') out_file.write('interestingness: ' + str(current_image_num) + ' out of ' + str(total_images) + '\n'); out_file.write('\n') current_image_num = current_image_num + 1; print('') pagenum = pagenum + 1; #this is in the else exception block. Itwon't increment for a failure. #this block is indented such that it will only run if there are no exceptions #in the original query. That means if there are exceptions, mintime won't be incremented #and it will try again timeskip = maxtime - mintime #used for initializing next binary search mintime = maxtime out_file.write('Total images queried: ' + str(total_images_queried) + '\n') out_file.close
en
0.718748
#!/usr/bin/python # # So this script is in a bit of a hack state right now. # This script reads # # # # Graciously copied and modified from: # http://graphics.cs.cmu.edu/projects/im2gps/flickr_code.html #Image querying script written by <NAME>, #and extended heavily James Hays #9/26/2007 added dynamic timeslices to query more efficiently. #8/18/2008 added new fields and set maximum time slice. #8/19/2008 this is a much simpler function which gets ALL geotagged photos of # sufficient accuracy. No queries, no negative constraints. # divides up the query results into multiple files # 1/5/2009 # now uses date_taken instead of date_upload to get more diverse blocks of images # 1/13/2009 - uses the original im2gps keywords, not as negative constraints though #30 second time out on sockets before they throw #an exception. I've been having trouble with urllib.urlopen hanging in the #flickr API. This will show up as exceptions.IOError. #the time out needs to be pretty long, it seems, because the flickr servers can be slow #to respond to our big searches. #returns a query and the search times to attempt to get a desired number of photos #this needs serious refactoring -KAS # number of seconds to skip per query #timeskip = 62899200 #two years #timeskip = 604800 #one week #two days #timeskip = 86400 #one day #timeskip = 3600 #one hour #timeskip = 2257 #for resuming previous query #mintime = 1121832000 #from im2gps #mintime = 1167407788 # resume crash england #mintime = 1177828976 #resume crash japan #mintime = 1187753798 #resume crash greece #resume crash WashingtonDC #10/12/2007, at the end of im2gps queries #new approach - adjust maxtime until we get the desired number of images #within a block. We'll need to keep upper bounds and lower #lower bound is well defined (mintime), but upper bound is not. We can't #search all the way from endtime. #lower bound OF the upper time limit. 
must be at least 15 minutes or zero results #upper bound of the upper time limit #search stops after a fixed number of iterations #bbox="-180, -90, 180, 90", #6 is region level. #we want to catch these failures somehow and keep going. #want to make sure this won't crash later on for some reason #midpoint between current value and lower bound. #only if we're not in a degenerate case #print type(inst) # the exception instance #print inst.args # arguments stored in .args #print inst # __str__ allows args to printed directly #end of while binary search ########################################################################### # Modify this section to reflect your data and specific search ########################################################################### # flickr auth information: # change these to your flickr api keys and secret # API key # shared "secret" #where do you want the data #how many photos do you want to try and get #The file to get the queries from #query_file_name = 'place_rec_queries_fall08.txt' #aggregate all of the positive and negative queries together. #an empty list #a string #line end character is 2 long? #this is the desired number of photos in each block # make a new FlickrAPI instance # change this to the location where you want to put your output file ########################################################################### #form the query string. # CHANGE THIS BACK int(rsp.photos[0]['pages']) #only visit 16 pages max, to try and avoid the dreaded duplicate bug #16 pages = 4000 images, should be duplicate safe. Most interesting pictures will be taken. #print 'stopping before page ' + str(int(math.ceil(num/3) + 1)) + '\n' #for pagenum in range(1, num_visit_pages + 1): #page one is searched twice # WARNING THIS QUERY HAS TO MATCH THE SEARCH QUERY!!!! #extras = "tags, original_format, license, geo, date_taken, date_upload, o_dims, views", #accuracy="6", #6 is region level. 
#mintime), #maxtime)) #rsp = fapi.photos_search(api_key=flickrAPIKey, # ispublic="1", # media="photos", # per_page="250", # page='0', #str(pagenum), # sort="interestingness-desc", # has_geo = "0", #bbox="-180, -90, 180, 90", # text=query_string, # #accuracy="6", #6 is region level. most things seem 10 or better. # extras = "tags, original_format, license, geo, date_taken, date_upload, o_dims, views", # min_upload_date=str(mintime), # max_upload_date=str(maxtime)) ##min_taken_date=str(datetime.fromtimestamp(mintime)), ##max_taken_date=str(datetime.fromtimestamp(maxtime))) #print type(inst) # the exception instance #print inst.args # arguments stored in .args #print inst # __str__ allows args to printed directly # and print them ##print(http://farm{farm-id}.static.flickr.com/{server-id}/{id}_{secret}.jpg) #b['id']+"_"+b['secret']+'.jpg' #this is in the else exception block. Itwon't increment for a failure. #this block is indented such that it will only run if there are no exceptions #in the original query. That means if there are exceptions, mintime won't be incremented #and it will try again #used for initializing next binary search
2.580182
3
Chapter04/python/2.0.0/com/sparksamples/util.py
quguiliang/Machine-Learning-with-Spark-Second-Edition
112
9848
<gh_stars>100-1000 import os import sys from pyspark.sql.types import * PATH = "/home/ubuntu/work/ml-resources/spark-ml/data" SPARK_HOME = "/home/ubuntu/work/spark-2.0.0-bin-hadoop2.7/" os.environ['SPARK_HOME'] = SPARK_HOME sys.path.append(SPARK_HOME + "/python") from pyspark import SparkContext from pyspark import SparkConf from pyspark.sql import SparkSession conf = SparkConf().setAppName("First Spark App").setMaster("local") sc = SparkContext(conf=conf) spark = SparkSession(sc) def get_user_data(): custom_schema = StructType([ StructField("no", StringType(), True), StructField("age", IntegerType(), True), StructField("gender", StringType(), True), StructField("occupation", StringType(), True), StructField("zipCode", StringType(), True) ]) from pyspark.sql import SQLContext from pyspark.sql.types import * sql_context = SQLContext(sc) user_df = sql_context.read \ .format('com.databricks.spark.csv') \ .options(header='false', delimiter='|') \ .load("%s/ml-100k/u.user" % PATH, schema = custom_schema) return user_df def get_movie_data_df(): custom_schema = StructType([ StructField("no", StringType(), True), StructField("moviename", StringType(), True), StructField("date", StringType(), True), StructField("f1", StringType(), True), StructField("url", StringType(), True), StructField("f2", IntegerType(), True), StructField("f3", IntegerType(), True), StructField("f4", IntegerType(), True), StructField("f5", IntegerType(), True), StructField("f6", IntegerType(), True), StructField("f7", IntegerType(), True), StructField("f8", IntegerType(), True), StructField("f9", IntegerType(), True), StructField("f10", IntegerType(), True), StructField("f11", IntegerType(), True), StructField("f12", IntegerType(), True), StructField("f13", IntegerType(), True), StructField("f14", IntegerType(), True), StructField("f15", IntegerType(), True), StructField("f16", IntegerType(), True), StructField("f17", IntegerType(), True), StructField("f18", IntegerType(), True), StructField("f19", 
IntegerType(), True) ]) from pyspark.sql import SQLContext from pyspark.sql.types import * sql_context = SQLContext(sc) movie_df = sql_context.read \ .format('com.databricks.spark.csv') \ .options(header='false', delimiter='|') \ .load("%s/ml-100k/u.item" % PATH, schema = custom_schema) return movie_df def get_movie_data(): return sc.textFile("%s/ml-100k/u.item" % PATH) def get_rating_data(): return sc.textFile("%s/ml-100k/u.data" % PATH)
import os import sys from pyspark.sql.types import * PATH = "/home/ubuntu/work/ml-resources/spark-ml/data" SPARK_HOME = "/home/ubuntu/work/spark-2.0.0-bin-hadoop2.7/" os.environ['SPARK_HOME'] = SPARK_HOME sys.path.append(SPARK_HOME + "/python") from pyspark import SparkContext from pyspark import SparkConf from pyspark.sql import SparkSession conf = SparkConf().setAppName("First Spark App").setMaster("local") sc = SparkContext(conf=conf) spark = SparkSession(sc) def get_user_data(): custom_schema = StructType([ StructField("no", StringType(), True), StructField("age", IntegerType(), True), StructField("gender", StringType(), True), StructField("occupation", StringType(), True), StructField("zipCode", StringType(), True) ]) from pyspark.sql import SQLContext from pyspark.sql.types import * sql_context = SQLContext(sc) user_df = sql_context.read \ .format('com.databricks.spark.csv') \ .options(header='false', delimiter='|') \ .load("%s/ml-100k/u.user" % PATH, schema = custom_schema) return user_df def get_movie_data_df(): custom_schema = StructType([ StructField("no", StringType(), True), StructField("moviename", StringType(), True), StructField("date", StringType(), True), StructField("f1", StringType(), True), StructField("url", StringType(), True), StructField("f2", IntegerType(), True), StructField("f3", IntegerType(), True), StructField("f4", IntegerType(), True), StructField("f5", IntegerType(), True), StructField("f6", IntegerType(), True), StructField("f7", IntegerType(), True), StructField("f8", IntegerType(), True), StructField("f9", IntegerType(), True), StructField("f10", IntegerType(), True), StructField("f11", IntegerType(), True), StructField("f12", IntegerType(), True), StructField("f13", IntegerType(), True), StructField("f14", IntegerType(), True), StructField("f15", IntegerType(), True), StructField("f16", IntegerType(), True), StructField("f17", IntegerType(), True), StructField("f18", IntegerType(), True), StructField("f19", IntegerType(), True) 
]) from pyspark.sql import SQLContext from pyspark.sql.types import * sql_context = SQLContext(sc) movie_df = sql_context.read \ .format('com.databricks.spark.csv') \ .options(header='false', delimiter='|') \ .load("%s/ml-100k/u.item" % PATH, schema = custom_schema) return movie_df def get_movie_data(): return sc.textFile("%s/ml-100k/u.item" % PATH) def get_rating_data(): return sc.textFile("%s/ml-100k/u.data" % PATH)
none
1
2.716019
3
safemasks/resources/rest/router.py
Safemasks/safemasks-app
1
9849
""" """ from rest_framework import routers from safemasks.resources.rest.serializers import SupplierViewSet, TrustedSupplierViewSet # Routers provide an easy way of automatically determining the URL conf. ROUTER = routers.DefaultRouter() ROUTER.register(r"suppliers", SupplierViewSet, "suppliers") ROUTER.register(r"suppliers-trusted", TrustedSupplierViewSet, "suppliers-trusted")
""" """ from rest_framework import routers from safemasks.resources.rest.serializers import SupplierViewSet, TrustedSupplierViewSet # Routers provide an easy way of automatically determining the URL conf. ROUTER = routers.DefaultRouter() ROUTER.register(r"suppliers", SupplierViewSet, "suppliers") ROUTER.register(r"suppliers-trusted", TrustedSupplierViewSet, "suppliers-trusted")
en
0.449923
# Routers provide an easy way of automatically determining the URL conf.
1.862375
2
src/zope/testrunner/formatter.py
jamesjer/zope.testrunner
1
9850
<reponame>jamesjer/zope.testrunner<filename>src/zope/testrunner/formatter.py ############################################################################## # # Copyright (c) 2004-2008 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Output formatting. """ from __future__ import print_function try: from collections.abc import MutableMapping except ImportError: from collections import MutableMapping from contextlib import contextmanager import doctest import os import re import sys import tempfile import traceback from datetime import datetime, timedelta from zope.testrunner.exceptions import DocTestFailureException try: unicode except NameError: unicode = str doctest_template = """ File "%s", line %s, in %s %s Want: %s Got: %s """ class OutputFormatter(object): """Test runner output formatter.""" # Implementation note: be careful about printing stuff to sys.stderr. # It is used for interprocess communication between the parent and the # child test runner, when you run some test layers in a subprocess. # resume_layer() reasigns sys.stderr for this reason, but be careful # and don't store the original one in __init__ or something. 
max_width = 80 def __init__(self, options): self.options = options self.last_width = 0 self.compute_max_width() progress = property(lambda self: self.options.progress) verbose = property(lambda self: self.options.verbose) in_subprocess = property( lambda self: ( self.options.resume_layer is not None and self.options.processes > 1)) def compute_max_width(self): """Try to determine the terminal width.""" # Note that doing this every time is more test friendly. self.max_width = tigetnum('cols', self.max_width) def getShortDescription(self, test, room): """Return a description of a test that fits in ``room`` characters.""" room -= 1 s = str(test) if len(s) > room: pos = s.find(" (") if pos >= 0: w = room - (pos + 5) if w < 1: # first portion (test method name) is too long s = s[:room-3] + "..." else: pre = s[:pos+2] post = s[-w:] s = "%s...%s" % (pre, post) else: w = room - 4 s = '... ' + s[-w:] return ' ' + s[:room] def info(self, message): """Print an informative message.""" print(message) def info_suboptimal(self, message): """Print an informative message about losing some of the features. For example, when you run some tests in a subprocess, you lose the ability to use the debugger. 
""" print(message) def error(self, message): """Report an error.""" print(message) def error_with_banner(self, message): """Report an error with a big ASCII banner.""" print() print('*'*70) self.error(message) print('*'*70) print() def profiler_stats(self, stats): """Report profiler stats.""" stats.print_stats(50) def import_errors(self, import_errors): """Report test-module import errors (if any).""" if import_errors: print("Test-module import failures:") for error in import_errors: self.print_traceback("Module: %s\n" % error.module, error.exc_info), print() def tests_with_errors(self, errors): """Report names of tests with errors (if any).""" if errors: print() print("Tests with errors:") for test, exc_info in errors: print(" ", test) def tests_with_failures(self, failures): """Report names of tests with failures (if any).""" if failures: print() print("Tests with failures:") for test, exc_info in failures: print(" ", test) def modules_with_import_problems(self, import_errors): """Report names of modules with import problems (if any).""" if import_errors: print() print("Test-modules with import problems:") for test in import_errors: print(" " + test.module) def format_seconds(self, n_seconds): """Format a time in seconds.""" if n_seconds >= 60: n_minutes, n_seconds = divmod(n_seconds, 60) return "%d minutes %.3f seconds" % (n_minutes, n_seconds) else: return "%.3f seconds" % n_seconds def format_seconds_short(self, n_seconds): """Format a time in seconds (short version).""" return "%.3f s" % n_seconds def summary(self, n_tests, n_failures, n_errors, n_seconds, n_skipped=0): """Summarize the results of a single test layer.""" print(" Ran %s tests with %s failures, %s errors and " "%s skipped in %s." % (n_tests, n_failures, n_errors, n_skipped, self.format_seconds(n_seconds))) def totals(self, n_tests, n_failures, n_errors, n_seconds, n_skipped=0): """Summarize the results of all layers.""" print("Total: %s tests, %s failures, %s errors and %s skipped in %s." 
% (n_tests, n_failures, n_errors, n_skipped, self.format_seconds(n_seconds))) def list_of_tests(self, tests, layer_name): """Report a list of test names.""" print("Listing %s tests:" % layer_name) for test in tests: print(' ', test) def garbage(self, garbage): """Report garbage generated by tests.""" if garbage: print("Tests generated new (%d) garbage:" % len(garbage)) print(garbage) def test_garbage(self, test, garbage): """Report garbage generated by a test.""" if garbage: print("The following test left garbage:") print(test) print(garbage) def test_threads(self, test, new_threads): """Report threads left behind by a test.""" if new_threads: print("The following test left new threads behind:") print(test) print("New thread(s):", new_threads) def refcounts(self, rc, prev): """Report a change in reference counts.""" print(" sys refcount=%-8d change=%-6d" % (rc, rc - prev)) def detailed_refcounts(self, track, rc, prev): """Report a change in reference counts, with extra detail.""" print((" sum detail refcount=%-8d" " sys refcount=%-8d" " change=%-6d" % (track.n, rc, rc - prev))) track.output() def start_set_up(self, layer_name): """Report that we're setting up a layer. The next output operation should be stop_set_up(). """ print(" Set up %s" % layer_name, end=' ') sys.stdout.flush() def stop_set_up(self, seconds): """Report that we've set up a layer. Should be called right after start_set_up(). """ print("in %s." % self.format_seconds(seconds)) def start_tear_down(self, layer_name): """Report that we're tearing down a layer. The next output operation should be stop_tear_down() or tear_down_not_supported(). """ print(" Tear down %s" % layer_name, end=' ') sys.stdout.flush() def stop_tear_down(self, seconds): """Report that we've tore down a layer. Should be called right after start_tear_down(). """ print("in %s." % self.format_seconds(seconds)) def tear_down_not_supported(self): """Report that we could not tear down a layer. 
        Should be called right after start_tear_down().
        """
        print("... not supported")

    def start_test(self, test, tests_run, total_tests):
        """Report that we're about to run a test.

        The next output operation should be test_success(), test_error(), or
        test_failure().
        """
        # test_width tracks how many characters this test's progress line
        # occupies, so stop_test()/the next start_test() can erase it.
        self.test_width = 0
        if self.progress:
            if self.last_width:
                # Erase the previous progress line with a carriage return,
                # spaces, and another carriage return.
                sys.stdout.write('\r' + (' ' * self.last_width) + '\r')
            s = " %d/%d (%.1f%%)" % (tests_run, total_tests,
                                     tests_run * 100.0 / total_tests)
            sys.stdout.write(s)
            self.test_width += len(s)
            if self.verbose == 1:
                room = self.max_width - self.test_width - 1
                s = self.getShortDescription(test, room)
                sys.stdout.write(s)
                self.test_width += len(s)
        elif self.verbose == 1:
            sys.stdout.write('.' * test.countTestCases())
        elif self.in_subprocess:
            sys.stdout.write('.' * test.countTestCases())
            # Give the parent process a new line so it sees the progress
            # in a timely manner.
            sys.stdout.write('\n')

        if self.verbose > 1:
            s = str(test)
            sys.stdout.write(' ')
            sys.stdout.write(s)
            self.test_width += len(s) + 1

        sys.stdout.flush()

    def test_success(self, test, seconds):
        """Report that a test was successful.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        if self.verbose > 2:
            s = " (%s)" % self.format_seconds_short(seconds)
            sys.stdout.write(s)
            self.test_width += len(s) + 1

    def test_skipped(self, test, reason):
        """Report that a test was skipped.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        if self.verbose > 2:
            s = " (skipped: %s)" % reason
        elif self.verbose > 1:
            s = " (skipped)"
        else:
            return
        sys.stdout.write(s)
        self.test_width += len(s) + 1

    def test_error(self, test, seconds, exc_info, stdout=None, stderr=None):
        """Report that an error occurred while running a test.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        if self.verbose > 2:
            print(" (%s)" % self.format_seconds_short(seconds))
        print()
        self.print_traceback("Error in test %s" % test, exc_info)
        self.print_std_streams(stdout, stderr)
        # Reset width bookkeeping: the traceback already moved us to a
        # fresh line, so there is nothing left to erase.
        self.test_width = self.last_width = 0

    def test_failure(self, test, seconds, exc_info, stdout=None, stderr=None):
        """Report that a test failed.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        if self.verbose > 2:
            print(" (%s)" % self.format_seconds_short(seconds))
        print()
        self.print_traceback("Failure in test %s" % test, exc_info)
        self.print_std_streams(stdout, stderr)
        # Reset width bookkeeping (see test_error).
        self.test_width = self.last_width = 0

    def print_traceback(self, msg, exc_info):
        """Report an error with a traceback."""
        print()
        print(msg)
        print(self.format_traceback(exc_info))

    def print_std_streams(self, stdout, stderr):
        """Emit contents of buffered standard streams."""
        if stdout:
            sys.stdout.write("Stdout:\n")
            sys.stdout.write(stdout)
            if not stdout.endswith("\n"):
                sys.stdout.write("\n")
            sys.stdout.write("\n")
        if stderr:
            sys.stderr.write("Stderr:\n")
            sys.stderr.write(stderr)
            if not stderr.endswith("\n"):
                sys.stderr.write("\n")
            sys.stderr.write("\n")

    def format_traceback(self, exc_info):
        """Format the traceback."""
        v = exc_info[1]
        if isinstance(v, DocTestFailureException):
            # The doctest machinery already produced a formatted report;
            # use it as-is.
            tb = v.args[0]
        elif isinstance(v, doctest.DocTestFailure):
            tb = doctest_template % (
                v.test.filename,
                v.test.lineno + v.example.lineno + 1,
                v.test.name,
                v.example.source,
                v.example.want,
                v.got,
                )
        else:
            tb = "".join(traceback.format_exception(*exc_info))
        return tb

    def stop_test(self, test):
        """Clean up the output state after a test."""
        if self.progress:
            # Remember how wide the line is so the next start_test() can
            # erase it.
            self.last_width = self.test_width
        elif self.verbose > 1:
            print()
        sys.stdout.flush()

    def stop_tests(self):
        """Clean up the output state after a collection of tests."""
        if self.progress and self.last_width:
            sys.stdout.write('\r' + (' ' * self.last_width) + '\r')
        if self.verbose == 1 or self.progress:
            print()


def tigetnum(attr, default=None):
    """Return a value from the terminfo database.

    Terminfo is used on Unix-like systems to report various terminal
    attributes (such as width, height or the number of supported colors).

    Returns ``default`` when the ``curses`` module is not available, or when
    sys.stdout is not a terminal.
    """
    try:
        import curses
    except ImportError:
        # avoid reimporting a broken module in python 2.3
        sys.modules['curses'] = None
    else:
        # If sys.stdout is not a real file object (e.g. in unit tests that
        # use various wrappers), you get an error, different depending on
        # Python version:
        expected_exceptions = (curses.error, TypeError, AttributeError)
        if sys.version_info >= (3,):
            import io
            expected_exceptions += (io.UnsupportedOperation, )
        try:
            curses.setupterm()
        except expected_exceptions:
            # You get curses.error when $TERM is set to an unknown name
            pass
        else:
            try:
                return curses.tigetnum(attr)
            except expected_exceptions:
                # You get TypeError on PyPy3 due to a bug:
                # https://bitbucket.org/pypy/pypy/issue/2016/pypy3-cursestigetnum-raises-ctype
                pass
    return default


def terminal_has_colors():
    """Determine whether the terminal supports colors.

    Some terminals (e.g. the emacs built-in one) don't.
    """
    # Terminals advertising fewer than 8 colors (or -1, i.e. unknown)
    # are treated as monochrome.
    return tigetnum('colors', -1) >= 8


class ColorfulOutputFormatter(OutputFormatter):
    """Output formatter that uses ANSI color codes.

    Like syntax highlighting in your text editor, colorizing
    test failures helps the developer.
    """

    # These colors are carefully chosen to have enough contrast
    # on terminals with both black and white background.
    # Maps semantic message categories to color names understood by
    # color_code() below.
    colorscheme = {'normal': 'normal',
                   'default': 'default',
                   'info': 'normal',
                   'suboptimal-behaviour': 'magenta',
                   'error': 'brightred',
                   'number': 'green',
                   'slow-test': 'brightmagenta',
                   'ok-number': 'green',
                   'error-number': 'brightred',
                   'filename': 'lightblue',
                   'lineno': 'lightred',
                   'testname': 'lightcyan',
                   'failed-example': 'cyan',
                   'expected-output': 'green',
                   'actual-output': 'red',
                   'character-diffs': 'magenta',
                   'diff-chunk': 'magenta',
                   'exception': 'red',
                   'skipped': 'brightyellow',
                   }

    # Map prefix character to color in diff output.  This handles ndiff and
    # udiff correctly, but not cdiff.  In cdiff we ought to highlight '!' as
    # expected-output until we see a '-', then highlight '!' as actual-output,
    # until we see a '*', then switch back to highlighting '!' as
    # expected-output.  Nevertheless, coloried cdiffs are reasonably readable,
    # so I'm not going to fix this.
    #   -- mgedmin
    diff_color = {'-': 'expected-output',
                  '+': 'actual-output',
                  '?': 'character-diffs',
                  '@': 'diff-chunk',
                  '*': 'diff-chunk',
                  '!': 'actual-output',
                  }

    # Color-name prefixes and the ANSI attribute code they map to
    # ('light'/'bright'/'bold' all mean bold).
    prefixes = [('dark', '0;'),
                ('light', '1;'),
                ('bright', '1;'),
                ('bold', '1;'),
                ]

    # Base ANSI foreground color codes.
    colorcodes = {'default': 0, 'normal': 0,
                  'black': 30,
                  'red': 31,
                  'green': 32,
                  'brown': 33, 'yellow': 33,
                  'blue': 34,
                  'magenta': 35,
                  'cyan': 36,
                  'grey': 37, 'gray': 37, 'white': 37}

    # Tests that take longer than this are highlighted as slow.
    slow_test_threshold = 10.0  # seconds

    def color_code(self, color):
        """Convert a color description (e.g. 'lightred') to a terminal code."""
        prefix_code = ''
        for prefix, code in self.prefixes:
            if color.startswith(prefix):
                color = color[len(prefix):]
                prefix_code = code
                break
        color_code = self.colorcodes[color]
        return '\033[%s%sm' % (prefix_code, color_code)

    def color(self, what):
        """Pick a named color from the color scheme"""
        return self.color_code(self.colorscheme[what])

    def colorize(self, what, message, normal='normal'):
        """Wrap message in color."""
        return self.color(what) + message + self.color(normal)

    def error_count_color(self, n):
        """Choose a color for the number of errors."""
        if n:
            return self.color('error-number')
        else:
            return self.color('ok-number')

    def skip_count_color(self, n):
        """Choose a color for the number of skipped tests."""
        if n:
            return self.color('skipped')
        else:
            return self.color('ok-number')

    def test_skipped(self, test, reason):
        """Report that a test was skipped.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        if self.verbose > 2:
            s = " (%sskipped: %s%s)" % (
                self.color('skipped'), reason, self.color('info'))
        elif self.verbose > 1:
            s = " (%sskipped%s)" % (
                self.color('skipped'), self.color('info'))
        else:
            return
        sys.stdout.write(s)
        self.test_width += len(s) + 1

    def info(self, message):
        """Print an informative message."""
        print(self.colorize('info', message))

    def info_suboptimal(self, message):
        """Print an informative message about losing some of the features.

        For example, when you run some tests in a subprocess, you lose the
        ability to use the debugger.
        """
        print(self.colorize('suboptimal-behaviour', message))

    def error(self, message):
        """Report an error."""
        print(self.colorize('error', message))

    def error_with_banner(self, message):
        """Report an error with a big ASCII banner."""
        print()
        print(self.colorize('error', '*'*70))
        self.error(message)
        print(self.colorize('error', '*'*70))
        print()

    def tear_down_not_supported(self):
        """Report that we could not tear down a layer.

        Should be called right after start_tear_down().
        """
        print("...", self.colorize('suboptimal-behaviour', "not supported"))

    def format_seconds(self, n_seconds, normal='normal'):
        """Format a time in seconds."""
        if n_seconds >= 60:
            n_minutes, n_seconds = divmod(n_seconds, 60)
            return "%s minutes %s seconds" % (
                self.colorize('number', '%d' % n_minutes, normal),
                self.colorize('number', '%.3f' % n_seconds, normal))
        else:
            return "%s seconds" % (
                self.colorize('number', '%.3f' % n_seconds, normal))

    def format_seconds_short(self, n_seconds):
        """Format a time in seconds (short version)."""
        # Slow tests get a distinct color so they stand out in the run.
        if n_seconds >= self.slow_test_threshold:
            color = 'slow-test'
        else:
            color = 'number'
        return self.colorize(color, "%.3f s" % n_seconds)

    def summary(self, n_tests, n_failures, n_errors, n_seconds,
                n_skipped=0):
        """Summarize the results."""
        sys.stdout.writelines([
            self.color('info'), '  Ran ',
            self.color('number'), str(n_tests),
            self.color('info'), ' tests with ',
            self.error_count_color(n_failures), str(n_failures),
            self.color('info'), ' failures, ',
            self.error_count_color(n_errors), str(n_errors),
            self.color('info'), ' errors, ',
            self.skip_count_color(n_skipped), str(n_skipped),
            self.color('info'), ' skipped in ',
            self.format_seconds(n_seconds, 'info'), '.',
            self.color('normal'), '\n',
            ])

    def totals(self, n_tests, n_failures, n_errors, n_seconds,
               n_skipped=0):
        """Report totals (number of tests, failures, and errors)."""
        sys.stdout.writelines([
            self.color('info'), 'Total: ',
            self.color('number'), str(n_tests),
            self.color('info'), ' tests, ',
            self.error_count_color(n_failures), str(n_failures),
            self.color('info'), ' failures, ',
            self.error_count_color(n_errors), str(n_errors),
            self.color('info'), ' errors, ',
            self.skip_count_color(n_skipped), str(n_skipped),
            self.color('info'), ' skipped in ',
            self.format_seconds(n_seconds, 'info'), '.',
            self.color('normal'), '\n'])

    def print_traceback(self, msg, exc_info):
        """Report an error with a traceback."""
        print()
        print(self.colorize('error', msg))
        v = exc_info[1]
        if isinstance(v, DocTestFailureException):
            self.print_doctest_failure(v.args[0])
        elif isinstance(v, doctest.DocTestFailure):
            # I don't think these are ever used... -- mgedmin
            tb = self.format_traceback(exc_info)
            print(tb)
        else:
            tb = self.format_traceback(exc_info)
            self.print_colorized_traceback(tb)

    def print_doctest_failure(self, formatted_failure):
        """Report a doctest failure.

        ``formatted_failure`` is a string -- that's what
        DocTestSuite/DocFileSuite gives us.
        """
        # State machine over the pre-formatted doctest report: section
        # headers ('Failed example:', 'Expected:', ...) select the color
        # used for the indented lines that follow.
        color_of_indented_text = 'normal'
        colorize_diff = False
        for line in formatted_failure.splitlines():
            if line.startswith('File '):
                m = re.match(r'File "(.*)", line (\d*), in (.*)$', line)
                if m:
                    filename, lineno, test = m.groups()
                    sys.stdout.writelines([
                        self.color('normal'), 'File "',
                        self.color('filename'), filename,
                        self.color('normal'), '", line ',
                        self.color('lineno'), lineno,
                        self.color('normal'), ', in ',
                        self.color('testname'), test,
                        self.color('normal'), '\n'])
                else:
                    print(line)
            elif line.startswith('    ') or line.strip() == '':
                if colorize_diff and len(line) > 4:
                    color = self.diff_color.get(
                        line[4], color_of_indented_text)
                    print(self.colorize(color, line))
                else:
                    if line.strip() != '':
                        print(self.colorize(color_of_indented_text, line))
                    else:
                        print(line)
            else:
                colorize_diff = False
                if line.startswith('Failed example'):
                    color_of_indented_text = 'failed-example'
                elif line.startswith('Expected:'):
                    color_of_indented_text = 'expected-output'
                elif line.startswith('Got:'):
                    color_of_indented_text = 'actual-output'
                elif line.startswith('Exception raised:'):
                    color_of_indented_text = 'exception'
                elif line.startswith('Differences '):
                    color_of_indented_text = 'normal'
                    colorize_diff = True
                else:
                    color_of_indented_text = 'normal'
                print(line)
        print()

    def print_colorized_traceback(self, formatted_traceback):
        """Report a test failure.

        ``formatted_traceback`` is a string.
        """
        for line in formatted_traceback.splitlines():
            if line.startswith('  File'):
                m = re.match(r'  File "(.*)", line (\d*), in (.*)$', line)
                if m:
                    filename, lineno, test = m.groups()
                    sys.stdout.writelines([
                        self.color('normal'), '  File "',
                        self.color('filename'), filename,
                        self.color('normal'), '", line ',
                        self.color('lineno'), lineno,
                        self.color('normal'), ', in ',
                        self.color('testname'), test,
                        self.color('normal'), '\n'])
                else:
                    print(line)
            elif line.startswith('    '):
                # Source lines of the traceback.
                print(self.colorize('failed-example', line))
            elif line.startswith('Traceback (most recent call last)'):
                print(line)
            else:
                print(self.colorize('exception', line))
        print()


class FakeTest(object):
    """A fake test object that only has an id."""

    failureException = None

    def __init__(self, test_id):
        self._id = test_id

    def id(self):
        return self._id


# Conditional imports: we don't want zope.testrunner to have a hard
# dependency on subunit.
try:
    import subunit
    from subunit.iso8601 import Utc
    subunit.StreamResultToBytes
except (ImportError, AttributeError):
    subunit = None

# testtools is a hard dependency of subunit itself, but we guard it
# separately for richer error messages.
try:
    import testtools
    from testtools.content import (
        Content,
        ContentType,
        content_from_file,
        text_content,
        )
    testtools.StreamToExtendedDecorator
except (ImportError, AttributeError):
    testtools = None


class _RunnableDecorator(object):
    """Permit controlling the runnable annotation on tests.

    This decorates a StreamResult, adding a setRunnable context manager to
    indicate whether a test is runnable.  (A context manager is unidiomatic
    here, but it's just about the simplest way to stuff the relevant state
    through the various layers of decorators involved without accidentally
    affecting later test results.)
    """

    def __init__(self, decorated):
        self.decorated = decorated
        self._runnable = True

    def __getattr__(self, name):
        # Delegate everything else to the wrapped result.
        return getattr(self.decorated, name)

    @contextmanager
    def setRunnable(self, runnable):
        orig_runnable = self._runnable
        try:
            self._runnable = runnable
            yield
        finally:
            self._runnable = orig_runnable

    def status(self, **kwargs):
        kwargs = dict(kwargs)
        kwargs['runnable'] = self._runnable
        self.decorated.status(**kwargs)


class _SortedDict(MutableMapping, object):
    """A dict that always returns items in sorted order.

    This differs from collections.OrderedDict in that it returns items in
    *sorted* order, not in insertion order.

    We use this as a workaround for the fact that
    testtools.ExtendedToStreamDecorator doesn't sort the details dict when
    encoding it, which makes it difficult to write stable doctests for
    subunit v2 output.
    """

    def __init__(self, items):
        self._dict = dict(items)

    def __getitem__(self, key):
        return self._dict[key]

    def __setitem__(self, key, value):
        self._dict[key] = value

    def __delitem__(self, key):
        del self._dict[key]

    def __iter__(self):
        # Sorting here is what gives this mapping its deterministic order.
        return iter(sorted(self._dict))

    def __len__(self):
        return len(self._dict)


class SubunitOutputFormatter(object):
    """A subunit output formatter.

    This output formatter generates subunit-compatible output (see
    https://launchpad.net/subunit).  Subunit output is essentially a stream
    of results of unit tests.  In this formatter, non-test events (such as
    layer set up) are encoded as specially-tagged tests.

    In particular, for a layer 'foo', the fake tests related to layer set up
    and tear down are tagged with 'zope:layer' and are called 'foo:setUp' and
    'foo:tearDown'.  Any tests within layer 'foo' are tagged with
    'zope:layer:foo'.

    Note that all tags specific to this formatter begin with 'zope:'.
    """

    # subunit output is designed for computers, so displaying a progress bar
    # isn't helpful.
    progress = False
    verbose = property(lambda self: self.options.verbose)

    # Tags used for the fake tests that encode non-test events.
    TAG_INFO_SUBOPTIMAL = 'zope:info_suboptimal'
    TAG_ERROR_WITH_BANNER = 'zope:error_with_banner'
    TAG_LAYER = 'zope:layer'
    TAG_IMPORT_ERROR = 'zope:import_error'
    TAG_PROFILER_STATS = 'zope:profiler_stats'
    TAG_GARBAGE = 'zope:garbage'
    TAG_THREADS = 'zope:threads'
    TAG_REFCOUNTS = 'zope:refcounts'

    def __init__(self, options, stream=None):
        if subunit is None:
            raise Exception('Requires subunit 0.0.11 or better')
        if testtools is None:
            raise Exception('Requires testtools 0.9.30 or better')
        self.options = options
        if stream is None:
            stream = sys.stdout
        self._stream = stream
        self._subunit = self._subunit_factory(self._stream)
        # Used to track the last layer that was set up or torn down. Either
        # None or (layer_name, last_touched_time).
        self._last_layer = None
        self.UTC = Utc()
        # Content types used in the output.
        self.TRACEBACK_CONTENT_TYPE = ContentType(
            'text', 'x-traceback', {'language': 'python', 'charset': 'utf8'})
        self.PROFILE_CONTENT_TYPE = ContentType(
            'application', 'x-binary-profile')
        self.PLAIN_TEXT = ContentType('text', 'plain', {'charset': 'utf8'})

    @classmethod
    def _subunit_factory(cls, stream):
        """Return a TestResult attached to the given stream."""
        return _RunnableDecorator(subunit.TestProtocolClient(stream))

    def _emit_timestamp(self, now=None):
        """Emit a timestamp to the subunit stream.

        If 'now' is not specified, use the current time on the system clock.
        """
        if now is None:
            now = datetime.now(self.UTC)
        self._subunit.time(now)
        return now

    def _emit_fake_test(self, message, tag, details=None):
        """Emit a successful fake test to the subunit stream.

        Use this to print tagged informative messages.
        """
        test = FakeTest(message)
        with self._subunit.setRunnable(False):
            self._subunit.startTest(test)
            self._subunit.tags([tag], [])
            self._subunit.addSuccess(test, details=details)
            self._subunit.stopTest(test)

    def _emit_error(self, error_id, tag, exc_info, runnable=False):
        """Emit an error to the subunit stream.

        Use this to pass on information about errors that occur outside of
        tests.
        """
        test = FakeTest(error_id)
        with self._subunit.setRunnable(runnable):
            self._subunit.startTest(test)
            self._subunit.tags([tag], [])
            self._subunit.addError(test, exc_info)
            self._subunit.stopTest(test)

    def _emit_failure(self, failure_id, tag, exc_info):
        """Emit a failure to the subunit stream.

        Use this to pass on information about failures that occur outside
        of tests.
        """
        test = FakeTest(failure_id)
        self._subunit.addFailure(test, exc_info)

    def _enter_layer(self, layer_name):
        """Tell subunit that we are entering a layer."""
        self._subunit.tags(['zope:layer:%s' % (layer_name,)], [])

    def _exit_layer(self, layer_name):
        """Tell subunit that we are exiting a layer."""
        self._subunit.tags([], ['zope:layer:%s' % (layer_name,)])

    def info(self, message):
        """Print an informative message."""
        # info() output is not relevant to actual test results. It only
        # says things like "Running tests" or "Tearing down left over
        # layers", things that are communicated already by the subunit
        # stream. Just suppress the info() output.
        pass

    def info_suboptimal(self, message):
        """Print an informative message about losing some of the features.

        For example, when you run some tests in a subprocess, you lose the
        ability to use the debugger.
        """
        # Used _only_ to indicate running in a subprocess.
        self._emit_fake_test(message.strip(), self.TAG_INFO_SUBOPTIMAL)

    def error(self, message):
        """Report an error."""
        # XXX: Mostly used for user errors, sometimes used for errors in the
        # test framework, sometimes used to record layer setUp failure (!!!).
        self._stream.write('%s\n' % (message,))

    def error_with_banner(self, message):
        """Report an error with a big ASCII banner."""
        # Either "Could not communicate with subprocess"
        # Or "Can't post-mortem debug when running a layer as a subprocess!"
        self._emit_fake_test(message, self.TAG_ERROR_WITH_BANNER)

    def profiler_stats(self, stats):
        """Report profiler stats."""
        # The profile data is binary; ship it as a file attachment on a
        # fake test, via a temp file on disk.
        fd, filename = tempfile.mkstemp(prefix='zope.testrunner-')
        os.close(fd)
        try:
            stats.dump_stats(filename)
            profile_content = content_from_file(
                filename, content_type=self.PROFILE_CONTENT_TYPE)
            details = {'profiler-stats': profile_content}
            # Name the test 'zope:profiler_stats' just like its tag.
            self._emit_fake_test(
                self.TAG_PROFILER_STATS, self.TAG_PROFILER_STATS, details)
        finally:
            os.unlink(filename)

    def import_errors(self, import_errors):
        """Report test-module import errors (if any)."""
        if import_errors:
            for error in import_errors:
                self._emit_error(
                    error.module, self.TAG_IMPORT_ERROR, error.exc_info,
                    runnable=True)

    def tests_with_errors(self, errors):
        """Report names of tests with errors (if any).

        Simply not supported by the subunit formatter. Fancy summary output
        doesn't make sense.
        """
        pass

    def tests_with_failures(self, failures):
        """Report names of tests with failures (if any).

        Simply not supported by the subunit formatter. Fancy summary output
        doesn't make sense.
        """
        pass

    def modules_with_import_problems(self, import_errors):
        """Report names of modules with import problems (if any)."""
        # This is simply a summary method, and subunit output doesn't
        # benefit from summaries.
        pass

    def summary(self, n_tests, n_failures, n_errors, n_seconds,
                n_skipped=0):
        """Summarize the results of a single test layer.

        Since subunit is a stream protocol format, it has no need for a
        summary. When the stream is finished other tools can generate a
        summary if so desired.
        """
        pass

    def totals(self, n_tests, n_failures, n_errors, n_seconds, n_skipped=0):
        """Summarize the results of all layers.

        Simply not supported by the subunit formatter. Fancy summary output
        doesn't make sense.
        """
        pass

    def _emit_exists(self, test):
        """Emit an indication that a test exists.

        With the v1 protocol, we just emit a fake success line.
        """
        self._subunit.addSuccess(test)

    def list_of_tests(self, tests, layer_name):
        """Report a list of test names."""
        self._enter_layer(layer_name)
        for test in tests:
            self._subunit.startTest(test)
            self._emit_exists(test)
            self._subunit.stopTest(test)
        self._exit_layer(layer_name)

    def garbage(self, garbage):
        """Report garbage generated by tests."""
        # XXX: Really, 'garbage', 'profiler_stats' and the 'refcounts' twins
        # ought to add extra details to a fake test that represents the
        # summary information for the whole suite. However, there's no event
        # on output formatters for "everything is really finished, honest". --
        # jml, 2010-02-14
        details = {'garbage': text_content(unicode(garbage))}
        self._emit_fake_test(self.TAG_GARBAGE, self.TAG_GARBAGE, details)

    def test_garbage(self, test, garbage):
        """Report garbage generated by a test.

        Encoded in the subunit stream as a test error. Clients can filter
        out these tests based on the tag if they don't think garbage should
        fail the test run.
        """
        # XXX: Perhaps 'test_garbage' and 'test_threads' ought to be within
        # the output for the actual test, appended as details to whatever
        # result the test gets. Not an option with the present API, as
        # there's no event for "no more output for this test". --
        # jml, 2010-02-14
        self._subunit.startTest(test)
        self._subunit.tags([self.TAG_GARBAGE], [])
        self._subunit.addError(
            test, details={'garbage': text_content(unicode(garbage))})
        self._subunit.stopTest(test)

    def test_threads(self, test, new_threads):
        """Report threads left behind by a test.

        Encoded in the subunit stream as a test error. Clients can filter
        out these tests based on the tag if they don't think left-over
        threads should fail the test run.
        """
        self._subunit.startTest(test)
        self._subunit.tags([self.TAG_THREADS], [])
        self._subunit.addError(
            test, details={'threads': text_content(unicode(new_threads))})
        self._subunit.stopTest(test)

    def refcounts(self, rc, prev):
        """Report a change in reference counts."""
        details = _SortedDict({
            'sys-refcounts': text_content(str(rc)),
            'changes': text_content(str(rc - prev)),
            })
        # XXX: Emit the details dict as JSON?
        self._emit_fake_test(self.TAG_REFCOUNTS, self.TAG_REFCOUNTS, details)

    def detailed_refcounts(self, track, rc, prev):
        """Report a change in reference counts, with extra detail."""
        details = _SortedDict({
            'sys-refcounts': text_content(str(rc)),
            'changes': text_content(str(rc - prev)),
            'track': text_content(str(track.delta)),
            })
        self._emit_fake_test(self.TAG_REFCOUNTS, self.TAG_REFCOUNTS, details)

    def start_set_up(self, layer_name):
        """Report that we're setting up a layer.

        We do this by emitting a fake test of the form '$LAYER_NAME:setUp'
        and adding a tag of the form 'zope:layer:$LAYER_NAME' to the current
        tag context.

        The next output operation should be stop_set_up().
        """
        test = FakeTest('%s:setUp' % (layer_name,))
        now = self._emit_timestamp()
        with self._subunit.setRunnable(False):
            self._subunit.startTest(test)
            self._subunit.tags([self.TAG_LAYER], [])
        self._last_layer = (layer_name, now)

    def stop_set_up(self, seconds):
        """Report that we've set up a layer.

        Should be called right after start_set_up().
        """
        layer_name, start_time = self._last_layer
        self._last_layer = None
        test = FakeTest('%s:setUp' % (layer_name,))
        # Reconstruct the end time from the start time recorded by
        # start_set_up() plus the reported duration.
        self._emit_timestamp(start_time + timedelta(seconds=seconds))
        with self._subunit.setRunnable(False):
            self._subunit.addSuccess(test)
            self._subunit.stopTest(test)
        self._enter_layer(layer_name)

    def layer_failure(self, failure_type, exc_info):
        layer_name, start_time = self._last_layer
        self._emit_failure(
            '%s:%s' % (layer_name, failure_type), self.TAG_LAYER, exc_info)

    def start_tear_down(self, layer_name):
        """Report that we're tearing down a layer.

        We do this by emitting a fake test of the form '$LAYER_NAME:tearDown'
        and removing a tag of the form 'layer:$LAYER_NAME' from the current
        tag context.

        The next output operation should be stop_tear_down() or
        tear_down_not_supported().
        """
        test = FakeTest('%s:tearDown' % (layer_name,))
        self._exit_layer(layer_name)
        now = self._emit_timestamp()
        with self._subunit.setRunnable(False):
            self._subunit.startTest(test)
            self._subunit.tags([self.TAG_LAYER], [])
        self._last_layer = (layer_name, now)

    def stop_tear_down(self, seconds):
        """Report that we've torn down a layer.

        Should be called right after start_tear_down().
        """
        layer_name, start_time = self._last_layer
        self._last_layer = None
        test = FakeTest('%s:tearDown' % (layer_name,))
        self._emit_timestamp(start_time + timedelta(seconds=seconds))
        with self._subunit.setRunnable(False):
            self._subunit.addSuccess(test)
            self._subunit.stopTest(test)

    def tear_down_not_supported(self):
        """Report that we could not tear down a layer.

        Should be called right after start_tear_down().
        """
        layer_name, start_time = self._last_layer
        self._last_layer = None
        test = FakeTest('%s:tearDown' % (layer_name,))
        self._emit_timestamp()
        with self._subunit.setRunnable(False):
            self._subunit.addSkip(test, 'tearDown not supported')
            self._subunit.stopTest(test)

    def start_test(self, test, tests_run, total_tests):
        """Report that we're about to run a test.

        The next output operation should be test_success(), test_error(), or
        test_failure().
        """
        self._emit_timestamp()
        self._subunit.startTest(test)

    def test_success(self, test, seconds):
        """Report that a test was successful.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        self._emit_timestamp()
        self._subunit.addSuccess(test)

    def test_skipped(self, test, reason):
        """Report that a test was skipped.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        self._subunit.addSkip(test, reason)

    def _exc_info_to_details(self, exc_info):
        """Translate 'exc_info' into a details dict usable with subunit."""
        # In an ideal world, we'd use the pre-bundled 'TracebackContent'
        # class from testtools. However, 'OutputFormatter' contains special
        # logic to handle errors from doctests, so we have to use that and
        # manually create an object equivalent to an instance of
        # 'TracebackContent'.
        formatter = OutputFormatter(None)
        traceback = formatter.format_traceback(exc_info)

        # We have no idea if the traceback is a unicode object or a
        # bytestring with non-ASCII characters. We had best be careful when
        # handling it.
        if isinstance(traceback, bytes):
            # Assume the traceback was UTF-8-encoded, but still be careful.
            unicode_tb = traceback.decode('utf-8', 'replace')
        else:
            unicode_tb = traceback

        return _SortedDict({
            'traceback': Content(
                self.TRACEBACK_CONTENT_TYPE,
                lambda: [unicode_tb.encode('utf8')]),
            })

    def _add_std_streams_to_details(self, details, stdout, stderr):
        """Add buffered standard stream contents to a subunit details dict."""
        if stdout:
            if isinstance(stdout, bytes):
                stdout = stdout.decode('utf-8', 'replace')
            details['test-stdout'] = Content(
                self.PLAIN_TEXT, lambda: [stdout.encode('utf-8')])
        if stderr:
            if isinstance(stderr, bytes):
                stderr = stderr.decode('utf-8', 'replace')
            details['test-stderr'] = Content(
                self.PLAIN_TEXT, lambda: [stderr.encode('utf-8')])

    def test_error(self, test, seconds, exc_info, stdout=None, stderr=None):
        """Report that an error occurred while running a test.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        self._emit_timestamp()
        details = self._exc_info_to_details(exc_info)
        self._add_std_streams_to_details(details, stdout, stderr)
        self._subunit.addError(test, details=details)

    def test_failure(self, test, seconds, exc_info, stdout=None, stderr=None):
        """Report that a test failed.

        Should be called right after start_test().

        The next output operation should be stop_test().
        """
        self._emit_timestamp()
        details = self._exc_info_to_details(exc_info)
        self._add_std_streams_to_details(details, stdout, stderr)
        self._subunit.addFailure(test, details=details)

    def stop_test(self, test):
        """Clean up the output state after a test."""
        self._subunit.stopTest(test)

    def stop_tests(self):
        """Clean up the output state after a collection of tests."""
        # subunit handles all of this itself.
        pass


class SubunitV2OutputFormatter(SubunitOutputFormatter):
    """A subunit v2 output formatter."""

    @classmethod
    def _subunit_factory(cls, stream):
        """Return a TestResult attached to the given stream."""
        stream_result = _RunnableDecorator(subunit.StreamResultToBytes(stream))
        result = testtools.ExtendedToStreamDecorator(stream_result)
        # Lift our decorating method up so that we can get at it easily.
        result.setRunnable = stream_result.setRunnable
        result.startTestRun()
        return result

    def error(self, message):
        """Report an error."""
        # XXX: Mostly used for user errors, sometimes used for errors in the
        # test framework, sometimes used to record layer setUp failure (!!!).
        self._subunit.status(
            file_name='error', file_bytes=unicode(message).encode('utf-8'),
            eof=True, mime_type=repr(self.PLAIN_TEXT))

    def _emit_exists(self, test):
        """Emit an indication that a test exists."""
        now = datetime.now(self.UTC)
        self._subunit.status(
            test_id=test.id(), test_status='exists',
            test_tags=self._subunit.current_tags, timestamp=now)
############################################################################## # # Copyright (c) 2004-2008 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## """Output formatting. """ from __future__ import print_function try: from collections.abc import MutableMapping except ImportError: from collections import MutableMapping from contextlib import contextmanager import doctest import os import re import sys import tempfile import traceback from datetime import datetime, timedelta from zope.testrunner.exceptions import DocTestFailureException try: unicode except NameError: unicode = str doctest_template = """ File "%s", line %s, in %s %s Want: %s Got: %s """ class OutputFormatter(object): """Test runner output formatter.""" # Implementation note: be careful about printing stuff to sys.stderr. # It is used for interprocess communication between the parent and the # child test runner, when you run some test layers in a subprocess. # resume_layer() reasigns sys.stderr for this reason, but be careful # and don't store the original one in __init__ or something. 
max_width = 80 def __init__(self, options): self.options = options self.last_width = 0 self.compute_max_width() progress = property(lambda self: self.options.progress) verbose = property(lambda self: self.options.verbose) in_subprocess = property( lambda self: ( self.options.resume_layer is not None and self.options.processes > 1)) def compute_max_width(self): """Try to determine the terminal width.""" # Note that doing this every time is more test friendly. self.max_width = tigetnum('cols', self.max_width) def getShortDescription(self, test, room): """Return a description of a test that fits in ``room`` characters.""" room -= 1 s = str(test) if len(s) > room: pos = s.find(" (") if pos >= 0: w = room - (pos + 5) if w < 1: # first portion (test method name) is too long s = s[:room-3] + "..." else: pre = s[:pos+2] post = s[-w:] s = "%s...%s" % (pre, post) else: w = room - 4 s = '... ' + s[-w:] return ' ' + s[:room] def info(self, message): """Print an informative message.""" print(message) def info_suboptimal(self, message): """Print an informative message about losing some of the features. For example, when you run some tests in a subprocess, you lose the ability to use the debugger. 
""" print(message) def error(self, message): """Report an error.""" print(message) def error_with_banner(self, message): """Report an error with a big ASCII banner.""" print() print('*'*70) self.error(message) print('*'*70) print() def profiler_stats(self, stats): """Report profiler stats.""" stats.print_stats(50) def import_errors(self, import_errors): """Report test-module import errors (if any).""" if import_errors: print("Test-module import failures:") for error in import_errors: self.print_traceback("Module: %s\n" % error.module, error.exc_info), print() def tests_with_errors(self, errors): """Report names of tests with errors (if any).""" if errors: print() print("Tests with errors:") for test, exc_info in errors: print(" ", test) def tests_with_failures(self, failures): """Report names of tests with failures (if any).""" if failures: print() print("Tests with failures:") for test, exc_info in failures: print(" ", test) def modules_with_import_problems(self, import_errors): """Report names of modules with import problems (if any).""" if import_errors: print() print("Test-modules with import problems:") for test in import_errors: print(" " + test.module) def format_seconds(self, n_seconds): """Format a time in seconds.""" if n_seconds >= 60: n_minutes, n_seconds = divmod(n_seconds, 60) return "%d minutes %.3f seconds" % (n_minutes, n_seconds) else: return "%.3f seconds" % n_seconds def format_seconds_short(self, n_seconds): """Format a time in seconds (short version).""" return "%.3f s" % n_seconds def summary(self, n_tests, n_failures, n_errors, n_seconds, n_skipped=0): """Summarize the results of a single test layer.""" print(" Ran %s tests with %s failures, %s errors and " "%s skipped in %s." % (n_tests, n_failures, n_errors, n_skipped, self.format_seconds(n_seconds))) def totals(self, n_tests, n_failures, n_errors, n_seconds, n_skipped=0): """Summarize the results of all layers.""" print("Total: %s tests, %s failures, %s errors and %s skipped in %s." 
% (n_tests, n_failures, n_errors, n_skipped, self.format_seconds(n_seconds))) def list_of_tests(self, tests, layer_name): """Report a list of test names.""" print("Listing %s tests:" % layer_name) for test in tests: print(' ', test) def garbage(self, garbage): """Report garbage generated by tests.""" if garbage: print("Tests generated new (%d) garbage:" % len(garbage)) print(garbage) def test_garbage(self, test, garbage): """Report garbage generated by a test.""" if garbage: print("The following test left garbage:") print(test) print(garbage) def test_threads(self, test, new_threads): """Report threads left behind by a test.""" if new_threads: print("The following test left new threads behind:") print(test) print("New thread(s):", new_threads) def refcounts(self, rc, prev): """Report a change in reference counts.""" print(" sys refcount=%-8d change=%-6d" % (rc, rc - prev)) def detailed_refcounts(self, track, rc, prev): """Report a change in reference counts, with extra detail.""" print((" sum detail refcount=%-8d" " sys refcount=%-8d" " change=%-6d" % (track.n, rc, rc - prev))) track.output() def start_set_up(self, layer_name): """Report that we're setting up a layer. The next output operation should be stop_set_up(). """ print(" Set up %s" % layer_name, end=' ') sys.stdout.flush() def stop_set_up(self, seconds): """Report that we've set up a layer. Should be called right after start_set_up(). """ print("in %s." % self.format_seconds(seconds)) def start_tear_down(self, layer_name): """Report that we're tearing down a layer. The next output operation should be stop_tear_down() or tear_down_not_supported(). """ print(" Tear down %s" % layer_name, end=' ') sys.stdout.flush() def stop_tear_down(self, seconds): """Report that we've tore down a layer. Should be called right after start_tear_down(). """ print("in %s." % self.format_seconds(seconds)) def tear_down_not_supported(self): """Report that we could not tear down a layer. 
Should be called right after start_tear_down(). """ print("... not supported") def start_test(self, test, tests_run, total_tests): """Report that we're about to run a test. The next output operation should be test_success(), test_error(), or test_failure(). """ self.test_width = 0 if self.progress: if self.last_width: sys.stdout.write('\r' + (' ' * self.last_width) + '\r') s = " %d/%d (%.1f%%)" % (tests_run, total_tests, tests_run * 100.0 / total_tests) sys.stdout.write(s) self.test_width += len(s) if self.verbose == 1: room = self.max_width - self.test_width - 1 s = self.getShortDescription(test, room) sys.stdout.write(s) self.test_width += len(s) elif self.verbose == 1: sys.stdout.write('.' * test.countTestCases()) elif self.in_subprocess: sys.stdout.write('.' * test.countTestCases()) # Give the parent process a new line so it sees the progress # in a timely manner. sys.stdout.write('\n') if self.verbose > 1: s = str(test) sys.stdout.write(' ') sys.stdout.write(s) self.test_width += len(s) + 1 sys.stdout.flush() def test_success(self, test, seconds): """Report that a test was successful. Should be called right after start_test(). The next output operation should be stop_test(). """ if self.verbose > 2: s = " (%s)" % self.format_seconds_short(seconds) sys.stdout.write(s) self.test_width += len(s) + 1 def test_skipped(self, test, reason): """Report that a test was skipped. Should be called right after start_test(). The next output operation should be stop_test(). """ if self.verbose > 2: s = " (skipped: %s)" % reason elif self.verbose > 1: s = " (skipped)" else: return sys.stdout.write(s) self.test_width += len(s) + 1 def test_error(self, test, seconds, exc_info, stdout=None, stderr=None): """Report that an error occurred while running a test. Should be called right after start_test(). The next output operation should be stop_test(). 
""" if self.verbose > 2: print(" (%s)" % self.format_seconds_short(seconds)) print() self.print_traceback("Error in test %s" % test, exc_info) self.print_std_streams(stdout, stderr) self.test_width = self.last_width = 0 def test_failure(self, test, seconds, exc_info, stdout=None, stderr=None): """Report that a test failed. Should be called right after start_test(). The next output operation should be stop_test(). """ if self.verbose > 2: print(" (%s)" % self.format_seconds_short(seconds)) print() self.print_traceback("Failure in test %s" % test, exc_info) self.print_std_streams(stdout, stderr) self.test_width = self.last_width = 0 def print_traceback(self, msg, exc_info): """Report an error with a traceback.""" print() print(msg) print(self.format_traceback(exc_info)) def print_std_streams(self, stdout, stderr): """Emit contents of buffered standard streams.""" if stdout: sys.stdout.write("Stdout:\n") sys.stdout.write(stdout) if not stdout.endswith("\n"): sys.stdout.write("\n") sys.stdout.write("\n") if stderr: sys.stderr.write("Stderr:\n") sys.stderr.write(stderr) if not stderr.endswith("\n"): sys.stderr.write("\n") sys.stderr.write("\n") def format_traceback(self, exc_info): """Format the traceback.""" v = exc_info[1] if isinstance(v, DocTestFailureException): tb = v.args[0] elif isinstance(v, doctest.DocTestFailure): tb = doctest_template % ( v.test.filename, v.test.lineno + v.example.lineno + 1, v.test.name, v.example.source, v.example.want, v.got, ) else: tb = "".join(traceback.format_exception(*exc_info)) return tb def stop_test(self, test): """Clean up the output state after a test.""" if self.progress: self.last_width = self.test_width elif self.verbose > 1: print() sys.stdout.flush() def stop_tests(self): """Clean up the output state after a collection of tests.""" if self.progress and self.last_width: sys.stdout.write('\r' + (' ' * self.last_width) + '\r') if self.verbose == 1 or self.progress: print() def tigetnum(attr, default=None): """Return a value 
from the terminfo database. Terminfo is used on Unix-like systems to report various terminal attributes (such as width, height or the number of supported colors). Returns ``default`` when the ``curses`` module is not available, or when sys.stdout is not a terminal. """ try: import curses except ImportError: # avoid reimporting a broken module in python 2.3 sys.modules['curses'] = None else: # If sys.stdout is not a real file object (e.g. in unit tests that # use various wrappers), you get an error, different depending on # Python version: expected_exceptions = (curses.error, TypeError, AttributeError) if sys.version_info >= (3,): import io expected_exceptions += (io.UnsupportedOperation, ) try: curses.setupterm() except expected_exceptions: # You get curses.error when $TERM is set to an unknown name pass else: try: return curses.tigetnum(attr) except expected_exceptions: # You get TypeError on PyPy3 due to a bug: # https://bitbucket.org/pypy/pypy/issue/2016/pypy3-cursestigetnum-raises-ctype pass return default def terminal_has_colors(): """Determine whether the terminal supports colors. Some terminals (e.g. the emacs built-in one) don't. """ return tigetnum('colors', -1) >= 8 class ColorfulOutputFormatter(OutputFormatter): """Output formatter that uses ANSI color codes. Like syntax highlighting in your text editor, colorizing test failures helps the developer. """ # These colors are carefully chosen to have enough contrast # on terminals with both black and white background. 
colorscheme = {'normal': 'normal', 'default': 'default', 'info': 'normal', 'suboptimal-behaviour': 'magenta', 'error': 'brightred', 'number': 'green', 'slow-test': 'brightmagenta', 'ok-number': 'green', 'error-number': 'brightred', 'filename': 'lightblue', 'lineno': 'lightred', 'testname': 'lightcyan', 'failed-example': 'cyan', 'expected-output': 'green', 'actual-output': 'red', 'character-diffs': 'magenta', 'diff-chunk': 'magenta', 'exception': 'red', 'skipped': 'brightyellow', } # Map prefix character to color in diff output. This handles ndiff and # udiff correctly, but not cdiff. In cdiff we ought to highlight '!' as # expected-output until we see a '-', then highlight '!' as actual-output, # until we see a '*', then switch back to highlighting '!' as # expected-output. Nevertheless, coloried cdiffs are reasonably readable, # so I'm not going to fix this. # -- mgedmin diff_color = {'-': 'expected-output', '+': 'actual-output', '?': 'character-diffs', '@': 'diff-chunk', '*': 'diff-chunk', '!': 'actual-output', } prefixes = [('dark', '0;'), ('light', '1;'), ('bright', '1;'), ('bold', '1;'), ] colorcodes = {'default': 0, 'normal': 0, 'black': 30, 'red': 31, 'green': 32, 'brown': 33, 'yellow': 33, 'blue': 34, 'magenta': 35, 'cyan': 36, 'grey': 37, 'gray': 37, 'white': 37} slow_test_threshold = 10.0 # seconds def color_code(self, color): """Convert a color description (e.g. 
'lightred') to a terminal code.""" prefix_code = '' for prefix, code in self.prefixes: if color.startswith(prefix): color = color[len(prefix):] prefix_code = code break color_code = self.colorcodes[color] return '\033[%s%sm' % (prefix_code, color_code) def color(self, what): """Pick a named color from the color scheme""" return self.color_code(self.colorscheme[what]) def colorize(self, what, message, normal='normal'): """Wrap message in color.""" return self.color(what) + message + self.color(normal) def error_count_color(self, n): """Choose a color for the number of errors.""" if n: return self.color('error-number') else: return self.color('ok-number') def skip_count_color(self, n): """Choose a color for the number of skipped tests.""" if n: return self.color('skipped') else: return self.color('ok-number') def test_skipped(self, test, reason): """Report that a test was skipped. Should be called right after start_test(). The next output operation should be stop_test(). """ if self.verbose > 2: s = " (%sskipped: %s%s)" % ( self.color('skipped'), reason, self.color('info')) elif self.verbose > 1: s = " (%sskipped%s)" % ( self.color('skipped'), self.color('info')) else: return sys.stdout.write(s) self.test_width += len(s) + 1 def info(self, message): """Print an informative message.""" print(self.colorize('info', message)) def info_suboptimal(self, message): """Print an informative message about losing some of the features. For example, when you run some tests in a subprocess, you lose the ability to use the debugger. """ print(self.colorize('suboptimal-behaviour', message)) def error(self, message): """Report an error.""" print(self.colorize('error', message)) def error_with_banner(self, message): """Report an error with a big ASCII banner.""" print() print(self.colorize('error', '*'*70)) self.error(message) print(self.colorize('error', '*'*70)) print() def tear_down_not_supported(self): """Report that we could not tear down a layer. 
Should be called right after start_tear_down(). """ print("...", self.colorize('suboptimal-behaviour', "not supported")) def format_seconds(self, n_seconds, normal='normal'): """Format a time in seconds.""" if n_seconds >= 60: n_minutes, n_seconds = divmod(n_seconds, 60) return "%s minutes %s seconds" % ( self.colorize('number', '%d' % n_minutes, normal), self.colorize('number', '%.3f' % n_seconds, normal)) else: return "%s seconds" % ( self.colorize('number', '%.3f' % n_seconds, normal)) def format_seconds_short(self, n_seconds): """Format a time in seconds (short version).""" if n_seconds >= self.slow_test_threshold: color = 'slow-test' else: color = 'number' return self.colorize(color, "%.3f s" % n_seconds) def summary(self, n_tests, n_failures, n_errors, n_seconds, n_skipped=0): """Summarize the results.""" sys.stdout.writelines([ self.color('info'), ' Ran ', self.color('number'), str(n_tests), self.color('info'), ' tests with ', self.error_count_color(n_failures), str(n_failures), self.color('info'), ' failures, ', self.error_count_color(n_errors), str(n_errors), self.color('info'), ' errors, ', self.skip_count_color(n_skipped), str(n_skipped), self.color('info'), ' skipped in ', self.format_seconds(n_seconds, 'info'), '.', self.color('normal'), '\n', ]) def totals(self, n_tests, n_failures, n_errors, n_seconds, n_skipped=0): """Report totals (number of tests, failures, and errors).""" sys.stdout.writelines([ self.color('info'), 'Total: ', self.color('number'), str(n_tests), self.color('info'), ' tests, ', self.error_count_color(n_failures), str(n_failures), self.color('info'), ' failures, ', self.error_count_color(n_errors), str(n_errors), self.color('info'), ' errors, ', self.skip_count_color(n_skipped), str(n_skipped), self.color('info'), ' skipped in ', self.format_seconds(n_seconds, 'info'), '.', self.color('normal'), '\n']) def print_traceback(self, msg, exc_info): """Report an error with a traceback.""" print() print(self.colorize('error', msg)) v = 
exc_info[1] if isinstance(v, DocTestFailureException): self.print_doctest_failure(v.args[0]) elif isinstance(v, doctest.DocTestFailure): # I don't think these are ever used... -- mgedmin tb = self.format_traceback(exc_info) print(tb) else: tb = self.format_traceback(exc_info) self.print_colorized_traceback(tb) def print_doctest_failure(self, formatted_failure): """Report a doctest failure. ``formatted_failure`` is a string -- that's what DocTestSuite/DocFileSuite gives us. """ color_of_indented_text = 'normal' colorize_diff = False for line in formatted_failure.splitlines(): if line.startswith('File '): m = re.match(r'File "(.*)", line (\d*), in (.*)$', line) if m: filename, lineno, test = m.groups() sys.stdout.writelines([ self.color('normal'), 'File "', self.color('filename'), filename, self.color('normal'), '", line ', self.color('lineno'), lineno, self.color('normal'), ', in ', self.color('testname'), test, self.color('normal'), '\n']) else: print(line) elif line.startswith(' ') or line.strip() == '': if colorize_diff and len(line) > 4: color = self.diff_color.get( line[4], color_of_indented_text) print(self.colorize(color, line)) else: if line.strip() != '': print(self.colorize(color_of_indented_text, line)) else: print(line) else: colorize_diff = False if line.startswith('Failed example'): color_of_indented_text = 'failed-example' elif line.startswith('Expected:'): color_of_indented_text = 'expected-output' elif line.startswith('Got:'): color_of_indented_text = 'actual-output' elif line.startswith('Exception raised:'): color_of_indented_text = 'exception' elif line.startswith('Differences '): color_of_indented_text = 'normal' colorize_diff = True else: color_of_indented_text = 'normal' print(line) print() def print_colorized_traceback(self, formatted_traceback): """Report a test failure. ``formatted_traceback`` is a string. 
""" for line in formatted_traceback.splitlines(): if line.startswith(' File'): m = re.match(r' File "(.*)", line (\d*), in (.*)$', line) if m: filename, lineno, test = m.groups() sys.stdout.writelines([ self.color('normal'), ' File "', self.color('filename'), filename, self.color('normal'), '", line ', self.color('lineno'), lineno, self.color('normal'), ', in ', self.color('testname'), test, self.color('normal'), '\n']) else: print(line) elif line.startswith(' '): print(self.colorize('failed-example', line)) elif line.startswith('Traceback (most recent call last)'): print(line) else: print(self.colorize('exception', line)) print() class FakeTest(object): """A fake test object that only has an id.""" failureException = None def __init__(self, test_id): self._id = test_id def id(self): return self._id # Conditional imports: we don't want zope.testrunner to have a hard # dependency on subunit. try: import subunit from subunit.iso8601 import Utc subunit.StreamResultToBytes except (ImportError, AttributeError): subunit = None # testtools is a hard dependency of subunit itself, but we guard it # separately for richer error messages. try: import testtools from testtools.content import ( Content, ContentType, content_from_file, text_content, ) testtools.StreamToExtendedDecorator except (ImportError, AttributeError): testtools = None class _RunnableDecorator(object): """Permit controlling the runnable annotation on tests. This decorates a StreamResult, adding a setRunnable context manager to indicate whether a test is runnable. (A context manager is unidiomatic here, but it's just about the simplest way to stuff the relevant state through the various layers of decorators involved without accidentally affecting later test results.) 
""" def __init__(self, decorated): self.decorated = decorated self._runnable = True def __getattr__(self, name): return getattr(self.decorated, name) @contextmanager def setRunnable(self, runnable): orig_runnable = self._runnable try: self._runnable = runnable yield finally: self._runnable = orig_runnable def status(self, **kwargs): kwargs = dict(kwargs) kwargs['runnable'] = self._runnable self.decorated.status(**kwargs) class _SortedDict(MutableMapping, object): """A dict that always returns items in sorted order. This differs from collections.OrderedDict in that it returns items in *sorted* order, not in insertion order. We use this as a workaround for the fact that testtools.ExtendedToStreamDecorator doesn't sort the details dict when encoding it, which makes it difficult to write stable doctests for subunit v2 output. """ def __init__(self, items): self._dict = dict(items) def __getitem__(self, key): return self._dict[key] def __setitem__(self, key, value): self._dict[key] = value def __delitem__(self, key): del self._dict[key] def __iter__(self): return iter(sorted(self._dict)) def __len__(self): return len(self._dict) class SubunitOutputFormatter(object): """A subunit output formatter. This output formatter generates subunit-compatible output (see https://launchpad.net/subunit). Subunit output is essentially a stream of results of unit tests. In this formatter, non-test events (such as layer set up) are encoded as specially-tagged tests. In particular, for a layer 'foo', the fake tests related to layer setup and teardown are tagged with 'zope:layer' and are called 'foo:setUp' and 'foo:tearDown'. Any tests within layer 'foo' are tagged with 'zope:layer:foo'. Note that all tags specific to this formatter begin with 'zope:'. """ # subunit output is designed for computers, so displaying a progress bar # isn't helpful. 
progress = False verbose = property(lambda self: self.options.verbose) TAG_INFO_SUBOPTIMAL = 'zope:info_suboptimal' TAG_ERROR_WITH_BANNER = 'zope:error_with_banner' TAG_LAYER = 'zope:layer' TAG_IMPORT_ERROR = 'zope:import_error' TAG_PROFILER_STATS = 'zope:profiler_stats' TAG_GARBAGE = 'zope:garbage' TAG_THREADS = 'zope:threads' TAG_REFCOUNTS = 'zope:refcounts' def __init__(self, options, stream=None): if subunit is None: raise Exception('Requires subunit 0.0.11 or better') if testtools is None: raise Exception('Requires testtools 0.9.30 or better') self.options = options if stream is None: stream = sys.stdout self._stream = stream self._subunit = self._subunit_factory(self._stream) # Used to track the last layer that was set up or torn down. Either # None or (layer_name, last_touched_time). self._last_layer = None self.UTC = Utc() # Content types used in the output. self.TRACEBACK_CONTENT_TYPE = ContentType( 'text', 'x-traceback', {'language': 'python', 'charset': 'utf8'}) self.PROFILE_CONTENT_TYPE = ContentType( 'application', 'x-binary-profile') self.PLAIN_TEXT = ContentType('text', 'plain', {'charset': 'utf8'}) @classmethod def _subunit_factory(cls, stream): """Return a TestResult attached to the given stream.""" return _RunnableDecorator(subunit.TestProtocolClient(stream)) def _emit_timestamp(self, now=None): """Emit a timestamp to the subunit stream. If 'now' is not specified, use the current time on the system clock. """ if now is None: now = datetime.now(self.UTC) self._subunit.time(now) return now def _emit_fake_test(self, message, tag, details=None): """Emit a successful fake test to the subunit stream. Use this to print tagged informative messages. """ test = FakeTest(message) with self._subunit.setRunnable(False): self._subunit.startTest(test) self._subunit.tags([tag], []) self._subunit.addSuccess(test, details=details) self._subunit.stopTest(test) def _emit_error(self, error_id, tag, exc_info, runnable=False): """Emit an error to the subunit stream. 
Use this to pass on information about errors that occur outside of tests. """ test = FakeTest(error_id) with self._subunit.setRunnable(runnable): self._subunit.startTest(test) self._subunit.tags([tag], []) self._subunit.addError(test, exc_info) self._subunit.stopTest(test) def _emit_failure(self, failure_id, tag, exc_info): """Emit an failure to the subunit stream. Use this to pass on information about failures that occur outside of tests. """ test = FakeTest(failure_id) self._subunit.addFailure(test, exc_info) def _enter_layer(self, layer_name): """Tell subunit that we are entering a layer.""" self._subunit.tags(['zope:layer:%s' % (layer_name,)], []) def _exit_layer(self, layer_name): """Tell subunit that we are exiting a layer.""" self._subunit.tags([], ['zope:layer:%s' % (layer_name,)]) def info(self, message): """Print an informative message.""" # info() output is not relevant to actual test results. It only # says things like "Running tests" or "Tearing down left over # layers", things that are communicated already by the subunit # stream. Just suppress the info() output. pass def info_suboptimal(self, message): """Print an informative message about losing some of the features. For example, when you run some tests in a subprocess, you lose the ability to use the debugger. """ # Used _only_ to indicate running in a subprocess. self._emit_fake_test(message.strip(), self.TAG_INFO_SUBOPTIMAL) def error(self, message): """Report an error.""" # XXX: Mostly used for user errors, sometimes used for errors in the # test framework, sometimes used to record layer setUp failure (!!!). self._stream.write('%s\n' % (message,)) def error_with_banner(self, message): """Report an error with a big ASCII banner.""" # Either "Could not communicate with subprocess" # Or "Can't post-mortem debug when running a layer as a subprocess!" 
self._emit_fake_test(message, self.TAG_ERROR_WITH_BANNER) def profiler_stats(self, stats): """Report profiler stats.""" fd, filename = tempfile.mkstemp(prefix='zope.testrunner-') os.close(fd) try: stats.dump_stats(filename) profile_content = content_from_file( filename, content_type=self.PROFILE_CONTENT_TYPE) details = {'profiler-stats': profile_content} # Name the test 'zope:profiler_stats' just like its tag. self._emit_fake_test( self.TAG_PROFILER_STATS, self.TAG_PROFILER_STATS, details) finally: os.unlink(filename) def import_errors(self, import_errors): """Report test-module import errors (if any).""" if import_errors: for error in import_errors: self._emit_error( error.module, self.TAG_IMPORT_ERROR, error.exc_info, runnable=True) def tests_with_errors(self, errors): """Report names of tests with errors (if any). Simply not supported by the subunit formatter. Fancy summary output doesn't make sense. """ pass def tests_with_failures(self, failures): """Report names of tests with failures (if any). Simply not supported by the subunit formatter. Fancy summary output doesn't make sense. """ pass def modules_with_import_problems(self, import_errors): """Report names of modules with import problems (if any).""" # This is simply a summary method, and subunit output doesn't # benefit from summaries. pass def summary(self, n_tests, n_failures, n_errors, n_seconds, n_skipped=0): """Summarize the results of a single test layer. Since subunit is a stream protocol format, it has no need for a summary. When the stream is finished other tools can generate a summary if so desired. """ pass def totals(self, n_tests, n_failures, n_errors, n_seconds, n_skipped=0): """Summarize the results of all layers. Simply not supported by the subunit formatter. Fancy summary output doesn't make sense. """ pass def _emit_exists(self, test): """Emit an indication that a test exists. With the v1 protocol, we just emit a fake success line. 
""" self._subunit.addSuccess(test) def list_of_tests(self, tests, layer_name): """Report a list of test names.""" self._enter_layer(layer_name) for test in tests: self._subunit.startTest(test) self._emit_exists(test) self._subunit.stopTest(test) self._exit_layer(layer_name) def garbage(self, garbage): """Report garbage generated by tests.""" # XXX: Really, 'garbage', 'profiler_stats' and the 'refcounts' twins # ought to add extra details to a fake test that represents the # summary information for the whole suite. However, there's no event # on output formatters for "everything is really finished, honest". -- # jml, 2010-02-14 details = {'garbage': text_content(unicode(garbage))} self._emit_fake_test(self.TAG_GARBAGE, self.TAG_GARBAGE, details) def test_garbage(self, test, garbage): """Report garbage generated by a test. Encoded in the subunit stream as a test error. Clients can filter out these tests based on the tag if they don't think garbage should fail the test run. """ # XXX: Perhaps 'test_garbage' and 'test_threads' ought to be within # the output for the actual test, appended as details to whatever # result the test gets. Not an option with the present API, as there's # no event for "no more output for this test". -- jml, 2010-02-14 self._subunit.startTest(test) self._subunit.tags([self.TAG_GARBAGE], []) self._subunit.addError( test, details={'garbage': text_content(unicode(garbage))}) self._subunit.stopTest(test) def test_threads(self, test, new_threads): """Report threads left behind by a test. Encoded in the subunit stream as a test error. Clients can filter out these tests based on the tag if they don't think left-over threads should fail the test run. 
""" self._subunit.startTest(test) self._subunit.tags([self.TAG_THREADS], []) self._subunit.addError( test, details={'threads': text_content(unicode(new_threads))}) self._subunit.stopTest(test) def refcounts(self, rc, prev): """Report a change in reference counts.""" details = _SortedDict({ 'sys-refcounts': text_content(str(rc)), 'changes': text_content(str(rc - prev)), }) # XXX: Emit the details dict as JSON? self._emit_fake_test(self.TAG_REFCOUNTS, self.TAG_REFCOUNTS, details) def detailed_refcounts(self, track, rc, prev): """Report a change in reference counts, with extra detail.""" details = _SortedDict({ 'sys-refcounts': text_content(str(rc)), 'changes': text_content(str(rc - prev)), 'track': text_content(str(track.delta)), }) self._emit_fake_test(self.TAG_REFCOUNTS, self.TAG_REFCOUNTS, details) def start_set_up(self, layer_name): """Report that we're setting up a layer. We do this by emitting a fake test of the form '$LAYER_NAME:setUp' and adding a tag of the form 'zope:layer:$LAYER_NAME' to the current tag context. The next output operation should be stop_set_up(). """ test = FakeTest('%s:setUp' % (layer_name,)) now = self._emit_timestamp() with self._subunit.setRunnable(False): self._subunit.startTest(test) self._subunit.tags([self.TAG_LAYER], []) self._last_layer = (layer_name, now) def stop_set_up(self, seconds): """Report that we've set up a layer. Should be called right after start_set_up(). """ layer_name, start_time = self._last_layer self._last_layer = None test = FakeTest('%s:setUp' % (layer_name,)) self._emit_timestamp(start_time + timedelta(seconds=seconds)) with self._subunit.setRunnable(False): self._subunit.addSuccess(test) self._subunit.stopTest(test) self._enter_layer(layer_name) def layer_failure(self, failure_type, exc_info): layer_name, start_time = self._last_layer self._emit_failure( '%s:%s' % (layer_name, failure_type), self.TAG_LAYER, exc_info) def start_tear_down(self, layer_name): """Report that we're tearing down a layer. 
We do this by emitting a fake test of the form '$LAYER_NAME:tearDown' and removing a tag of the form 'layer:$LAYER_NAME' from the current tag context. The next output operation should be stop_tear_down() or tear_down_not_supported(). """ test = FakeTest('%s:tearDown' % (layer_name,)) self._exit_layer(layer_name) now = self._emit_timestamp() with self._subunit.setRunnable(False): self._subunit.startTest(test) self._subunit.tags([self.TAG_LAYER], []) self._last_layer = (layer_name, now) def stop_tear_down(self, seconds): """Report that we've torn down a layer. Should be called right after start_tear_down(). """ layer_name, start_time = self._last_layer self._last_layer = None test = FakeTest('%s:tearDown' % (layer_name,)) self._emit_timestamp(start_time + timedelta(seconds=seconds)) with self._subunit.setRunnable(False): self._subunit.addSuccess(test) self._subunit.stopTest(test) def tear_down_not_supported(self): """Report that we could not tear down a layer. Should be called right after start_tear_down(). """ layer_name, start_time = self._last_layer self._last_layer = None test = FakeTest('%s:tearDown' % (layer_name,)) self._emit_timestamp() with self._subunit.setRunnable(False): self._subunit.addSkip(test, 'tearDown not supported') self._subunit.stopTest(test) def start_test(self, test, tests_run, total_tests): """Report that we're about to run a test. The next output operation should be test_success(), test_error(), or test_failure(). """ self._emit_timestamp() self._subunit.startTest(test) def test_success(self, test, seconds): """Report that a test was successful. Should be called right after start_test(). The next output operation should be stop_test(). """ self._emit_timestamp() self._subunit.addSuccess(test) def test_skipped(self, test, reason): """Report that a test was skipped. Should be called right after start_test(). The next output operation should be stop_test(). 
""" self._subunit.addSkip(test, reason) def _exc_info_to_details(self, exc_info): """Translate 'exc_info' into a details dict usable with subunit.""" # In an ideal world, we'd use the pre-bundled 'TracebackContent' # class from testtools. However, 'OutputFormatter' contains special # logic to handle errors from doctests, so we have to use that and # manually create an object equivalent to an instance of # 'TracebackContent'. formatter = OutputFormatter(None) traceback = formatter.format_traceback(exc_info) # We have no idea if the traceback is a unicode object or a # bytestring with non-ASCII characters. We had best be careful when # handling it. if isinstance(traceback, bytes): # Assume the traceback was UTF-8-encoded, but still be careful. unicode_tb = traceback.decode('utf-8', 'replace') else: unicode_tb = traceback return _SortedDict({ 'traceback': Content( self.TRACEBACK_CONTENT_TYPE, lambda: [unicode_tb.encode('utf8')]), }) def _add_std_streams_to_details(self, details, stdout, stderr): """Add buffered standard stream contents to a subunit details dict.""" if stdout: if isinstance(stdout, bytes): stdout = stdout.decode('utf-8', 'replace') details['test-stdout'] = Content( self.PLAIN_TEXT, lambda: [stdout.encode('utf-8')]) if stderr: if isinstance(stderr, bytes): stderr = stderr.decode('utf-8', 'replace') details['test-stderr'] = Content( self.PLAIN_TEXT, lambda: [stderr.encode('utf-8')]) def test_error(self, test, seconds, exc_info, stdout=None, stderr=None): """Report that an error occurred while running a test. Should be called right after start_test(). The next output operation should be stop_test(). """ self._emit_timestamp() details = self._exc_info_to_details(exc_info) self._add_std_streams_to_details(details, stdout, stderr) self._subunit.addError(test, details=details) def test_failure(self, test, seconds, exc_info, stdout=None, stderr=None): """Report that a test failed. Should be called right after start_test(). 
The next output operation should be stop_test(). """ self._emit_timestamp() details = self._exc_info_to_details(exc_info) self._add_std_streams_to_details(details, stdout, stderr) self._subunit.addFailure(test, details=details) def stop_test(self, test): """Clean up the output state after a test.""" self._subunit.stopTest(test) def stop_tests(self): """Clean up the output state after a collection of tests.""" # subunit handles all of this itself. pass class SubunitV2OutputFormatter(SubunitOutputFormatter): """A subunit v2 output formatter.""" @classmethod def _subunit_factory(cls, stream): """Return a TestResult attached to the given stream.""" stream_result = _RunnableDecorator(subunit.StreamResultToBytes(stream)) result = testtools.ExtendedToStreamDecorator(stream_result) # Lift our decorating method up so that we can get at it easily. result.setRunnable = stream_result.setRunnable result.startTestRun() return result def error(self, message): """Report an error.""" # XXX: Mostly used for user errors, sometimes used for errors in the # test framework, sometimes used to record layer setUp failure (!!!). self._subunit.status( file_name='error', file_bytes=unicode(message).encode('utf-8'), eof=True, mime_type=repr(self.PLAIN_TEXT)) def _emit_exists(self, test): """Emit an indication that a test exists.""" now = datetime.now(self.UTC) self._subunit.status( test_id=test.id(), test_status='exists', test_tags=self._subunit.current_tags, timestamp=now)
en
0.834808
############################################################################## # # Copyright (c) 2004-2008 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## Output formatting. File "%s", line %s, in %s %s Want: %s Got: %s Test runner output formatter. # Implementation note: be careful about printing stuff to sys.stderr. # It is used for interprocess communication between the parent and the # child test runner, when you run some test layers in a subprocess. # resume_layer() reasigns sys.stderr for this reason, but be careful # and don't store the original one in __init__ or something. Try to determine the terminal width. # Note that doing this every time is more test friendly. Return a description of a test that fits in ``room`` characters. # first portion (test method name) is too long Print an informative message. Print an informative message about losing some of the features. For example, when you run some tests in a subprocess, you lose the ability to use the debugger. Report an error. Report an error with a big ASCII banner. Report profiler stats. Report test-module import errors (if any). Report names of tests with errors (if any). Report names of tests with failures (if any). Report names of modules with import problems (if any). Format a time in seconds. Format a time in seconds (short version). Summarize the results of a single test layer. Summarize the results of all layers. Report a list of test names. Report garbage generated by tests. Report garbage generated by a test. 
Report threads left behind by a test. Report a change in reference counts. Report a change in reference counts, with extra detail. Report that we're setting up a layer. The next output operation should be stop_set_up(). Report that we've set up a layer. Should be called right after start_set_up(). Report that we're tearing down a layer. The next output operation should be stop_tear_down() or tear_down_not_supported(). Report that we've tore down a layer. Should be called right after start_tear_down(). Report that we could not tear down a layer. Should be called right after start_tear_down(). Report that we're about to run a test. The next output operation should be test_success(), test_error(), or test_failure(). # Give the parent process a new line so it sees the progress # in a timely manner. Report that a test was successful. Should be called right after start_test(). The next output operation should be stop_test(). Report that a test was skipped. Should be called right after start_test(). The next output operation should be stop_test(). Report that an error occurred while running a test. Should be called right after start_test(). The next output operation should be stop_test(). Report that a test failed. Should be called right after start_test(). The next output operation should be stop_test(). Report an error with a traceback. Emit contents of buffered standard streams. Format the traceback. Clean up the output state after a test. Clean up the output state after a collection of tests. Return a value from the terminfo database. Terminfo is used on Unix-like systems to report various terminal attributes (such as width, height or the number of supported colors). Returns ``default`` when the ``curses`` module is not available, or when sys.stdout is not a terminal. # avoid reimporting a broken module in python 2.3 # If sys.stdout is not a real file object (e.g. 
in unit tests that # use various wrappers), you get an error, different depending on # Python version: # You get curses.error when $TERM is set to an unknown name # You get TypeError on PyPy3 due to a bug: # https://bitbucket.org/pypy/pypy/issue/2016/pypy3-cursestigetnum-raises-ctype Determine whether the terminal supports colors. Some terminals (e.g. the emacs built-in one) don't. Output formatter that uses ANSI color codes. Like syntax highlighting in your text editor, colorizing test failures helps the developer. # These colors are carefully chosen to have enough contrast # on terminals with both black and white background. # Map prefix character to color in diff output. This handles ndiff and # udiff correctly, but not cdiff. In cdiff we ought to highlight '!' as # expected-output until we see a '-', then highlight '!' as actual-output, # until we see a '*', then switch back to highlighting '!' as # expected-output. Nevertheless, coloried cdiffs are reasonably readable, # so I'm not going to fix this. # -- mgedmin # seconds Convert a color description (e.g. 'lightred') to a terminal code. Pick a named color from the color scheme Wrap message in color. Choose a color for the number of errors. Choose a color for the number of skipped tests. Report that a test was skipped. Should be called right after start_test(). The next output operation should be stop_test(). Print an informative message. Print an informative message about losing some of the features. For example, when you run some tests in a subprocess, you lose the ability to use the debugger. Report an error. Report an error with a big ASCII banner. Report that we could not tear down a layer. Should be called right after start_tear_down(). Format a time in seconds. Format a time in seconds (short version). Summarize the results. Report totals (number of tests, failures, and errors). Report an error with a traceback. # I don't think these are ever used... -- mgedmin Report a doctest failure. 
``formatted_failure`` is a string -- that's what DocTestSuite/DocFileSuite gives us. Report a test failure. ``formatted_traceback`` is a string. A fake test object that only has an id. # Conditional imports: we don't want zope.testrunner to have a hard # dependency on subunit. # testtools is a hard dependency of subunit itself, but we guard it # separately for richer error messages. Permit controlling the runnable annotation on tests. This decorates a StreamResult, adding a setRunnable context manager to indicate whether a test is runnable. (A context manager is unidiomatic here, but it's just about the simplest way to stuff the relevant state through the various layers of decorators involved without accidentally affecting later test results.) A dict that always returns items in sorted order. This differs from collections.OrderedDict in that it returns items in *sorted* order, not in insertion order. We use this as a workaround for the fact that testtools.ExtendedToStreamDecorator doesn't sort the details dict when encoding it, which makes it difficult to write stable doctests for subunit v2 output. A subunit output formatter. This output formatter generates subunit-compatible output (see https://launchpad.net/subunit). Subunit output is essentially a stream of results of unit tests. In this formatter, non-test events (such as layer set up) are encoded as specially-tagged tests. In particular, for a layer 'foo', the fake tests related to layer setup and teardown are tagged with 'zope:layer' and are called 'foo:setUp' and 'foo:tearDown'. Any tests within layer 'foo' are tagged with 'zope:layer:foo'. Note that all tags specific to this formatter begin with 'zope:'. # subunit output is designed for computers, so displaying a progress bar # isn't helpful. # Used to track the last layer that was set up or torn down. Either # None or (layer_name, last_touched_time). # Content types used in the output. Return a TestResult attached to the given stream. 
Emit a timestamp to the subunit stream. If 'now' is not specified, use the current time on the system clock. Emit a successful fake test to the subunit stream. Use this to print tagged informative messages. Emit an error to the subunit stream. Use this to pass on information about errors that occur outside of tests. Emit an failure to the subunit stream. Use this to pass on information about failures that occur outside of tests. Tell subunit that we are entering a layer. Tell subunit that we are exiting a layer. Print an informative message. # info() output is not relevant to actual test results. It only # says things like "Running tests" or "Tearing down left over # layers", things that are communicated already by the subunit # stream. Just suppress the info() output. Print an informative message about losing some of the features. For example, when you run some tests in a subprocess, you lose the ability to use the debugger. # Used _only_ to indicate running in a subprocess. Report an error. # XXX: Mostly used for user errors, sometimes used for errors in the # test framework, sometimes used to record layer setUp failure (!!!). Report an error with a big ASCII banner. # Either "Could not communicate with subprocess" # Or "Can't post-mortem debug when running a layer as a subprocess!" Report profiler stats. # Name the test 'zope:profiler_stats' just like its tag. Report test-module import errors (if any). Report names of tests with errors (if any). Simply not supported by the subunit formatter. Fancy summary output doesn't make sense. Report names of tests with failures (if any). Simply not supported by the subunit formatter. Fancy summary output doesn't make sense. Report names of modules with import problems (if any). # This is simply a summary method, and subunit output doesn't # benefit from summaries. Summarize the results of a single test layer. Since subunit is a stream protocol format, it has no need for a summary. 
When the stream is finished other tools can generate a summary if so desired. Summarize the results of all layers. Simply not supported by the subunit formatter. Fancy summary output doesn't make sense. Emit an indication that a test exists. With the v1 protocol, we just emit a fake success line. Report a list of test names. Report garbage generated by tests. # XXX: Really, 'garbage', 'profiler_stats' and the 'refcounts' twins # ought to add extra details to a fake test that represents the # summary information for the whole suite. However, there's no event # on output formatters for "everything is really finished, honest". -- # jml, 2010-02-14 Report garbage generated by a test. Encoded in the subunit stream as a test error. Clients can filter out these tests based on the tag if they don't think garbage should fail the test run. # XXX: Perhaps 'test_garbage' and 'test_threads' ought to be within # the output for the actual test, appended as details to whatever # result the test gets. Not an option with the present API, as there's # no event for "no more output for this test". -- jml, 2010-02-14 Report threads left behind by a test. Encoded in the subunit stream as a test error. Clients can filter out these tests based on the tag if they don't think left-over threads should fail the test run. Report a change in reference counts. # XXX: Emit the details dict as JSON? Report a change in reference counts, with extra detail. Report that we're setting up a layer. We do this by emitting a fake test of the form '$LAYER_NAME:setUp' and adding a tag of the form 'zope:layer:$LAYER_NAME' to the current tag context. The next output operation should be stop_set_up(). Report that we've set up a layer. Should be called right after start_set_up(). Report that we're tearing down a layer. We do this by emitting a fake test of the form '$LAYER_NAME:tearDown' and removing a tag of the form 'layer:$LAYER_NAME' from the current tag context. 
The next output operation should be stop_tear_down() or tear_down_not_supported(). Report that we've torn down a layer. Should be called right after start_tear_down(). Report that we could not tear down a layer. Should be called right after start_tear_down(). Report that we're about to run a test. The next output operation should be test_success(), test_error(), or test_failure(). Report that a test was successful. Should be called right after start_test(). The next output operation should be stop_test(). Report that a test was skipped. Should be called right after start_test(). The next output operation should be stop_test(). Translate 'exc_info' into a details dict usable with subunit. # In an ideal world, we'd use the pre-bundled 'TracebackContent' # class from testtools. However, 'OutputFormatter' contains special # logic to handle errors from doctests, so we have to use that and # manually create an object equivalent to an instance of # 'TracebackContent'. # We have no idea if the traceback is a unicode object or a # bytestring with non-ASCII characters. We had best be careful when # handling it. # Assume the traceback was UTF-8-encoded, but still be careful. Add buffered standard stream contents to a subunit details dict. Report that an error occurred while running a test. Should be called right after start_test(). The next output operation should be stop_test(). Report that a test failed. Should be called right after start_test(). The next output operation should be stop_test(). Clean up the output state after a test. Clean up the output state after a collection of tests. # subunit handles all of this itself. A subunit v2 output formatter. Return a TestResult attached to the given stream. # Lift our decorating method up so that we can get at it easily. Report an error. # XXX: Mostly used for user errors, sometimes used for errors in the # test framework, sometimes used to record layer setUp failure (!!!). Emit an indication that a test exists.
1.851495
2
waterApp/migrations/0011_auto_20210911_1043.py
csisarep/groundwater_dashboard
0
9851
<reponame>csisarep/groundwater_dashboard # Generated by Django 2.2 on 2021-09-11 04:58 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('waterApp', '0010_auto_20210911_1041'), ] operations = [ migrations.AlterField( model_name='gwmonitoring', name='id', field=models.BigAutoField(primary_key=True, serialize=False), ), ]
# Generated by Django 2.2 on 2021-09-11 04:58 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('waterApp', '0010_auto_20210911_1041'), ] operations = [ migrations.AlterField( model_name='gwmonitoring', name='id', field=models.BigAutoField(primary_key=True, serialize=False), ), ]
en
0.842887
# Generated by Django 2.2 on 2021-09-11 04:58
1.446276
1
geomstats/geometry/stratified/__init__.py
shubhamtalbar96/geomstats
0
9852
"""The Stratified Space Geometry Package."""
"""The Stratified Space Geometry Package."""
en
0.635839
The Stratified Space Geometry Package.
0.866009
1
src/exporter/management/commands/test_export.py
xmdy/h9eNi8F5Ut
0
9853
from django.core.management import BaseCommand import logging # These two lines enable debugging at httplib level (requests->urllib3->http.client) # You will see the REQUEST, including HEADERS and DATA, and RESPONSE with HEADERS but without DATA. # The only thing missing will be the response.body which is not logged. try: import http.client as http_client except ImportError: # Python 2 import httplib as http_client http_client.HTTPConnection.debuglevel = 1 # You must initialize logging, otherwise you'll not see debug output. logging.basicConfig() logging.getLogger().setLevel(logging.DEBUG) requests_log = logging.getLogger("requests.packages.urllib3") requests_log.setLevel(logging.DEBUG) requests_log.propagate = True class Command(BaseCommand): def handle(self, *args, **options): from exporter.tasks import GenerateModelExportTask gmet = GenerateModelExportTask() gmet.run(1)
from django.core.management import BaseCommand import logging # These two lines enable debugging at httplib level (requests->urllib3->http.client) # You will see the REQUEST, including HEADERS and DATA, and RESPONSE with HEADERS but without DATA. # The only thing missing will be the response.body which is not logged. try: import http.client as http_client except ImportError: # Python 2 import httplib as http_client http_client.HTTPConnection.debuglevel = 1 # You must initialize logging, otherwise you'll not see debug output. logging.basicConfig() logging.getLogger().setLevel(logging.DEBUG) requests_log = logging.getLogger("requests.packages.urllib3") requests_log.setLevel(logging.DEBUG) requests_log.propagate = True class Command(BaseCommand): def handle(self, *args, **options): from exporter.tasks import GenerateModelExportTask gmet = GenerateModelExportTask() gmet.run(1)
en
0.860071
# These two lines enable debugging at httplib level (requests->urllib3->http.client) # You will see the REQUEST, including HEADERS and DATA, and RESPONSE with HEADERS but without DATA. # The only thing missing will be the response.body which is not logged. # Python 2 # You must initialize logging, otherwise you'll not see debug output.
2.149099
2
dask/tests/test_highgraph.py
ianthomas23/dask
0
9854
from functools import partial import os import pytest import dask import dask.array as da from dask.utils_test import inc from dask.highlevelgraph import HighLevelGraph, BasicLayer, Layer from dask.blockwise import Blockwise from dask.array.utils import assert_eq def test_visualize(tmpdir): pytest.importorskip("graphviz") fn = str(tmpdir) a = da.ones(10, chunks=(5,)) b = a + 1 c = a + 2 d = b + c d.dask.visualize(fn) assert os.path.exists(fn) def test_basic(): a = {"x": 1} b = {"y": (inc, "x")} layers = {"a": a, "b": b} dependencies = {"a": set(), "b": {"a"}} hg = HighLevelGraph(layers, dependencies) assert dict(hg) == {"x": 1, "y": (inc, "x")} assert all(isinstance(layer, Layer) for layer in hg.layers.values()) def test_keys_values_items_methods(): a = da.ones(10, chunks=(5,)) b = a + 1 c = a + 2 d = b + c hg = d.dask keys, values, items = hg.keys(), hg.values(), hg.items() assert all(isinstance(i, list) for i in [keys, values, items]) assert keys == [i for i in hg] assert values == [hg[i] for i in hg] assert items == [(k, v) for k, v in zip(keys, values)] def test_cull(): a = {"x": 1, "y": (inc, "x")} layers = { "a": BasicLayer( a, dependencies={"x": set(), "y": {"x"}}, global_dependencies=set() ) } dependencies = {"a": set()} hg = HighLevelGraph(layers, dependencies) culled_by_x = hg.cull({"x"}) assert dict(culled_by_x) == {"x": 1} culled_by_y = hg.cull({"y"}) assert dict(culled_by_y) == a @pytest.mark.parametrize("inject_dict", [True, False]) def test_map_basic_layers(inject_dict): """Check map_basic_layers() by injecting an inc() call""" y = da.ones(3, chunks=(3,), dtype="int") + 40 def inject_inc(dsk): assert isinstance(dsk, BasicLayer) dsk = dict(dsk) k = next(iter(dsk)) dsk[k] = (inc, dsk[k]) if inject_dict: return dsk # map_basic_layers() should automatically convert it to a `BasicLayer` else: return BasicLayer(dsk) dsk = y.__dask_graph__() y.dask = dsk.map_basic_layers(inject_inc) layers = list(y.dask.layers.values()) assert isinstance(layers[0], 
BasicLayer) assert isinstance(layers[1], Blockwise) assert_eq(y, [42] * 3) @pytest.mark.parametrize("use_layer_map_task", [True, False]) def test_map_tasks(use_layer_map_task): """Check map_tasks() by injecting an +1 to the `40` literal""" y = da.ones(3, chunks=(3,), dtype="int") + 40 def plus_one(tasks): ret = [] for t in tasks: if t == 40: t += 1 ret.append(t) return tuple(ret) dsk = y.__dask_graph__() if use_layer_map_task: # In order to test the default map_tasks() implementation on a Blockwise Layer, # we overwrite Blockwise.map_tasks with Layer.map_tasks blockwise_layer = list(dsk.layers.values())[1] blockwise_layer.map_tasks = partial(Layer.map_tasks, blockwise_layer) y.dask = dsk.map_tasks(plus_one) assert_eq(y, [42] * 3) def annot_map_fn(key): return key[1:] @pytest.mark.parametrize( "annotation", [ {"worker": "alice"}, {"block_id": annot_map_fn}, ], ) def test_single_annotation(annotation): with dask.annotate(**annotation): A = da.ones((10, 10), chunks=(5, 5)) alayer = A.__dask_graph__().layers[A.name] assert alayer.annotations == annotation assert dask.config.get("annotations", None) is None def test_multiple_annotations(): with dask.annotate(block_id=annot_map_fn): with dask.annotate(resource="GPU"): A = da.ones((10, 10), chunks=(5, 5)) B = A + 1 C = B + 1 assert dask.config.get("annotations", None) is None alayer = A.__dask_graph__().layers[A.name] blayer = B.__dask_graph__().layers[B.name] clayer = C.__dask_graph__().layers[C.name] assert alayer.annotations == {"resource": "GPU", "block_id": annot_map_fn} assert blayer.annotations == {"block_id": annot_map_fn} assert clayer.annotations is None
from functools import partial import os import pytest import dask import dask.array as da from dask.utils_test import inc from dask.highlevelgraph import HighLevelGraph, BasicLayer, Layer from dask.blockwise import Blockwise from dask.array.utils import assert_eq def test_visualize(tmpdir): pytest.importorskip("graphviz") fn = str(tmpdir) a = da.ones(10, chunks=(5,)) b = a + 1 c = a + 2 d = b + c d.dask.visualize(fn) assert os.path.exists(fn) def test_basic(): a = {"x": 1} b = {"y": (inc, "x")} layers = {"a": a, "b": b} dependencies = {"a": set(), "b": {"a"}} hg = HighLevelGraph(layers, dependencies) assert dict(hg) == {"x": 1, "y": (inc, "x")} assert all(isinstance(layer, Layer) for layer in hg.layers.values()) def test_keys_values_items_methods(): a = da.ones(10, chunks=(5,)) b = a + 1 c = a + 2 d = b + c hg = d.dask keys, values, items = hg.keys(), hg.values(), hg.items() assert all(isinstance(i, list) for i in [keys, values, items]) assert keys == [i for i in hg] assert values == [hg[i] for i in hg] assert items == [(k, v) for k, v in zip(keys, values)] def test_cull(): a = {"x": 1, "y": (inc, "x")} layers = { "a": BasicLayer( a, dependencies={"x": set(), "y": {"x"}}, global_dependencies=set() ) } dependencies = {"a": set()} hg = HighLevelGraph(layers, dependencies) culled_by_x = hg.cull({"x"}) assert dict(culled_by_x) == {"x": 1} culled_by_y = hg.cull({"y"}) assert dict(culled_by_y) == a @pytest.mark.parametrize("inject_dict", [True, False]) def test_map_basic_layers(inject_dict): """Check map_basic_layers() by injecting an inc() call""" y = da.ones(3, chunks=(3,), dtype="int") + 40 def inject_inc(dsk): assert isinstance(dsk, BasicLayer) dsk = dict(dsk) k = next(iter(dsk)) dsk[k] = (inc, dsk[k]) if inject_dict: return dsk # map_basic_layers() should automatically convert it to a `BasicLayer` else: return BasicLayer(dsk) dsk = y.__dask_graph__() y.dask = dsk.map_basic_layers(inject_inc) layers = list(y.dask.layers.values()) assert isinstance(layers[0], 
BasicLayer) assert isinstance(layers[1], Blockwise) assert_eq(y, [42] * 3) @pytest.mark.parametrize("use_layer_map_task", [True, False]) def test_map_tasks(use_layer_map_task): """Check map_tasks() by injecting an +1 to the `40` literal""" y = da.ones(3, chunks=(3,), dtype="int") + 40 def plus_one(tasks): ret = [] for t in tasks: if t == 40: t += 1 ret.append(t) return tuple(ret) dsk = y.__dask_graph__() if use_layer_map_task: # In order to test the default map_tasks() implementation on a Blockwise Layer, # we overwrite Blockwise.map_tasks with Layer.map_tasks blockwise_layer = list(dsk.layers.values())[1] blockwise_layer.map_tasks = partial(Layer.map_tasks, blockwise_layer) y.dask = dsk.map_tasks(plus_one) assert_eq(y, [42] * 3) def annot_map_fn(key): return key[1:] @pytest.mark.parametrize( "annotation", [ {"worker": "alice"}, {"block_id": annot_map_fn}, ], ) def test_single_annotation(annotation): with dask.annotate(**annotation): A = da.ones((10, 10), chunks=(5, 5)) alayer = A.__dask_graph__().layers[A.name] assert alayer.annotations == annotation assert dask.config.get("annotations", None) is None def test_multiple_annotations(): with dask.annotate(block_id=annot_map_fn): with dask.annotate(resource="GPU"): A = da.ones((10, 10), chunks=(5, 5)) B = A + 1 C = B + 1 assert dask.config.get("annotations", None) is None alayer = A.__dask_graph__().layers[A.name] blayer = B.__dask_graph__().layers[B.name] clayer = C.__dask_graph__().layers[C.name] assert alayer.annotations == {"resource": "GPU", "block_id": annot_map_fn} assert blayer.annotations == {"block_id": annot_map_fn} assert clayer.annotations is None
en
0.635302
Check map_basic_layers() by injecting an inc() call # map_basic_layers() should automatically convert it to a `BasicLayer` Check map_tasks() by injecting an +1 to the `40` literal # In order to test the default map_tasks() implementation on a Blockwise Layer, # we overwrite Blockwise.map_tasks with Layer.map_tasks
2.164969
2
transference.py
webpwnized/cryptography
13
9855
# Requires pip install bitarray from bitarray import bitarray import argparse, math def derive_transfer_function(pTransferFunctionString: str) -> list: lTransferFunction = list(map(int, pTransferFunctionString.split(','))) lTransferFunctionValid = True lLengthTransferFunction = len(lTransferFunction) for i in range(0, lLengthTransferFunction): if i not in lTransferFunction: lTransferFunctionValid = False break # end if # end for if not lTransferFunctionValid: raise Exception('Transfer function must contain all integers from 0 to N where (N - 1) is length of the substitution array.') lExponent = math.log(lLengthTransferFunction, 2) if lExponent != math.floor(lExponent): raise Exception('Transfer function length must be even power of 2.') return lTransferFunction def print_transfer_function_table(pTransferFunction: list) -> None: lLengthTransferFunction = len(pTransferFunction) lNumberBits = int(math.log(lLengthTransferFunction, 2)) lFormat = '0' + str(lNumberBits) + 'b' # print column headers print() for i in range(0, lNumberBits): print("x=" + str(i) + "\t", end="") for i in range(0, lNumberBits): print("y=" + str(i) + "\t", end="") print() # print values for transfer function for lIndex, lSubstitutionValue in enumerate(pTransferFunction): lBinaryIndex = bitarray(format(lIndex, lFormat)) lBinarySV = bitarray(format(lSubstitutionValue, lFormat)) for i in range(0, lNumberBits): print(int(lBinaryIndex[i]), end="") print("\t", end="") for i in range(0, lNumberBits): print(int(lBinarySV[i]), end="") print("\t", end="") print() print() def print_linear_approximation_table(pTransferFunction: list) -> None: lLengthTransferFunction = len(pTransferFunction) lNumberBits = int(math.log(lLengthTransferFunction, 2)) lFormat = '0' + str(lNumberBits) + 'b' # print column headers print("\t", end="") for i in range(0, lLengthTransferFunction): print("b=" + str(i) + "\t", end="") print() for lA in range(0, lLengthTransferFunction): # print row header print("a=" + str(lA) + "\t", 
end="") for lB in range(0, lLengthTransferFunction): a = bitarray(format(lA, lFormat)) b = bitarray(format(lB, lFormat)) lCount = 0 for lX, lY in enumerate(pTransferFunction): x = bitarray(format(lX, lFormat)) y = bitarray(format(lY, lFormat)) lVectorXorOfAX = 0 for i in range(0, lNumberBits): lVectorXorOfAX ^= int(a[i]) * int(x[i]) lVectorXorOfBY = 0 for i in range(0, lNumberBits): lVectorXorOfBY ^= int(b[i]) * int(y[i]) lAXxorBY = lVectorXorOfAX ^ lVectorXorOfBY if lAXxorBY == 0: lCount += 1 # end looping through transfer function print(str(lCount) + "\t", end="") # end for b print() # end for a if __name__ == '__main__': lArgParser = argparse.ArgumentParser(description='Transference: A tool to help visualize s-boxes (substitution boxes or transfer functions)') lArgParser.add_argument('-tft', '--transfer-function-table', help='Print the transfer function table for the s-box', action='store_true') lArgParser.add_argument('-lat', '--linear-approximation-table', help='Calculate the linear transformation table for the s-box', action='store_true') lArgParser.add_argument('-all', '--all', help='Calculate the linear transformation table for the s-box', action='store_true') lArgParser.add_argument('-v', '--verbose', help='Enables verbose output', action='store_true') lArgParser.add_argument('INPUT', action='store', type=str, help='The substitution table (s-box) represented as a comma delimted list of integers. The length of the list is the number of bits in the substitution. Required. Example: 3,2,0,1 means substitute 3 for 0, 2 for 1, 0 for 2 and 1 for 3. ') lArgs = lArgParser.parse_args() lTransferFunction = derive_transfer_function(lArgs.INPUT) if lArgs.all: lArgs.transfer_function_table = lArgs.linear_approximation_table = True if lArgs.transfer_function_table: print_transfer_function_table(lTransferFunction) if lArgs.linear_approximation_table: print_linear_approximation_table(lTransferFunction)
# Requires pip install bitarray from bitarray import bitarray import argparse, math def derive_transfer_function(pTransferFunctionString: str) -> list: lTransferFunction = list(map(int, pTransferFunctionString.split(','))) lTransferFunctionValid = True lLengthTransferFunction = len(lTransferFunction) for i in range(0, lLengthTransferFunction): if i not in lTransferFunction: lTransferFunctionValid = False break # end if # end for if not lTransferFunctionValid: raise Exception('Transfer function must contain all integers from 0 to N where (N - 1) is length of the substitution array.') lExponent = math.log(lLengthTransferFunction, 2) if lExponent != math.floor(lExponent): raise Exception('Transfer function length must be even power of 2.') return lTransferFunction def print_transfer_function_table(pTransferFunction: list) -> None: lLengthTransferFunction = len(pTransferFunction) lNumberBits = int(math.log(lLengthTransferFunction, 2)) lFormat = '0' + str(lNumberBits) + 'b' # print column headers print() for i in range(0, lNumberBits): print("x=" + str(i) + "\t", end="") for i in range(0, lNumberBits): print("y=" + str(i) + "\t", end="") print() # print values for transfer function for lIndex, lSubstitutionValue in enumerate(pTransferFunction): lBinaryIndex = bitarray(format(lIndex, lFormat)) lBinarySV = bitarray(format(lSubstitutionValue, lFormat)) for i in range(0, lNumberBits): print(int(lBinaryIndex[i]), end="") print("\t", end="") for i in range(0, lNumberBits): print(int(lBinarySV[i]), end="") print("\t", end="") print() print() def print_linear_approximation_table(pTransferFunction: list) -> None: lLengthTransferFunction = len(pTransferFunction) lNumberBits = int(math.log(lLengthTransferFunction, 2)) lFormat = '0' + str(lNumberBits) + 'b' # print column headers print("\t", end="") for i in range(0, lLengthTransferFunction): print("b=" + str(i) + "\t", end="") print() for lA in range(0, lLengthTransferFunction): # print row header print("a=" + str(lA) + "\t", 
end="") for lB in range(0, lLengthTransferFunction): a = bitarray(format(lA, lFormat)) b = bitarray(format(lB, lFormat)) lCount = 0 for lX, lY in enumerate(pTransferFunction): x = bitarray(format(lX, lFormat)) y = bitarray(format(lY, lFormat)) lVectorXorOfAX = 0 for i in range(0, lNumberBits): lVectorXorOfAX ^= int(a[i]) * int(x[i]) lVectorXorOfBY = 0 for i in range(0, lNumberBits): lVectorXorOfBY ^= int(b[i]) * int(y[i]) lAXxorBY = lVectorXorOfAX ^ lVectorXorOfBY if lAXxorBY == 0: lCount += 1 # end looping through transfer function print(str(lCount) + "\t", end="") # end for b print() # end for a if __name__ == '__main__': lArgParser = argparse.ArgumentParser(description='Transference: A tool to help visualize s-boxes (substitution boxes or transfer functions)') lArgParser.add_argument('-tft', '--transfer-function-table', help='Print the transfer function table for the s-box', action='store_true') lArgParser.add_argument('-lat', '--linear-approximation-table', help='Calculate the linear transformation table for the s-box', action='store_true') lArgParser.add_argument('-all', '--all', help='Calculate the linear transformation table for the s-box', action='store_true') lArgParser.add_argument('-v', '--verbose', help='Enables verbose output', action='store_true') lArgParser.add_argument('INPUT', action='store', type=str, help='The substitution table (s-box) represented as a comma delimted list of integers. The length of the list is the number of bits in the substitution. Required. Example: 3,2,0,1 means substitute 3 for 0, 2 for 1, 0 for 2 and 1 for 3. ') lArgs = lArgParser.parse_args() lTransferFunction = derive_transfer_function(lArgs.INPUT) if lArgs.all: lArgs.transfer_function_table = lArgs.linear_approximation_table = True if lArgs.transfer_function_table: print_transfer_function_table(lTransferFunction) if lArgs.linear_approximation_table: print_linear_approximation_table(lTransferFunction)
en
0.361015
# Requires pip install bitarray # end if # end for # print column headers # print values for transfer function # print column headers # print row header # end looping through transfer function # end for b # end for a
2.866055
3
tests/test_client.py
mjcaley/spamc
0
9856
import pytest from aiospamc.client import Client from aiospamc.exceptions import ( BadResponse, UsageException, DataErrorException, NoInputException, NoUserException, NoHostException, UnavailableException, InternalSoftwareException, OSErrorException, OSFileException, CantCreateException, IOErrorException, TemporaryFailureException, ProtocolException, NoPermissionException, ConfigException, ServerTimeoutException, ResponseException, ) from aiospamc.responses import Response async def test_request_sent_to_connection(mock_client_dependency, mocker, hostname): mock_req = mocker.MagicMock() await mock_client_dependency.request(mock_req, host=hostname) assert ( bytes(mock_req) == mock_client_dependency.connection_factory().request.await_args[0][0] ) async def test_request_response_sent_to_parser( mock_client_dependency, mocker, hostname ): mock_req = mocker.MagicMock() connection = mock_client_dependency.connection_factory() parser = mock_client_dependency.parser_factory() mocker.spy(parser, "parse") await mock_client_dependency.request(mock_req, host=hostname) response = connection.request.return_value assert response == parser.parse.call_args[0][0] async def test_request_returns_response(mock_client_dependency, mocker, hostname): mock_req = mocker.MagicMock() connection = mock_client_dependency.connection_factory() parser = mock_client_dependency.parser_factory() parse_spy = mocker.spy(parser, "parse") result = await mock_client_dependency.request(mock_req, host=hostname) expected = Response(**parse_spy.spy_return) assert expected == result async def test_request_raises_usage(mock_client_response, mocker, ex_usage, hostname): mock_client = mock_client_response(ex_usage) with pytest.raises(UsageException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_data_err( mock_client_response, mocker, ex_data_err, hostname ): mock_client = mock_client_response(ex_data_err) with pytest.raises(DataErrorException): await 
mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_no_input( mock_client_response, mocker, ex_no_input, hostname ): mock_client = mock_client_response(ex_no_input) with pytest.raises(NoInputException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_no_user( mock_client_response, mocker, ex_no_user, hostname ): mock_client = mock_client_response(ex_no_user) with pytest.raises(NoUserException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_no_host( mock_client_response, mocker, ex_no_host, hostname ): mock_client = mock_client_response(ex_no_host) with pytest.raises(NoHostException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_unavailable( mock_client_response, mocker, ex_unavailable, hostname ): mock_client = mock_client_response(ex_unavailable) with pytest.raises(UnavailableException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_software( mock_client_response, mocker, ex_software, hostname ): mock_client = mock_client_response(ex_software) with pytest.raises(InternalSoftwareException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_os_error( mock_client_response, mocker, ex_os_err, hostname ): mock_client = mock_client_response(ex_os_err) with pytest.raises(OSErrorException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_os_file( mock_client_response, mocker, ex_os_file, hostname ): mock_client = mock_client_response(ex_os_file) with pytest.raises(OSFileException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_cant_create( mock_client_response, mocker, ex_cant_create, hostname ): mock_client = mock_client_response(ex_cant_create) with pytest.raises(CantCreateException): await mock_client.request(mocker.MagicMock(), 
host=hostname) async def test_request_raises_io_error( mock_client_response, mocker, ex_io_err, hostname ): mock_client = mock_client_response(ex_io_err) with pytest.raises(IOErrorException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_temporary_failure( mock_client_response, mocker, ex_temp_fail, hostname ): mock_client = mock_client_response(ex_temp_fail) with pytest.raises(TemporaryFailureException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_protocol( mock_client_response, mocker, ex_protocol, hostname ): mock_client = mock_client_response(ex_protocol) with pytest.raises(ProtocolException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_no_permission( mock_client_response, mocker, ex_no_perm, hostname ): mock_client = mock_client_response(ex_no_perm) with pytest.raises(NoPermissionException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_config(mock_client_response, mocker, ex_config, hostname): mock_client = mock_client_response(ex_config) with pytest.raises(ConfigException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_timeout( mock_client_response, mocker, ex_timeout, hostname ): mock_client = mock_client_response(ex_timeout) with pytest.raises(ServerTimeoutException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_undefined( mock_client_response, mocker, ex_undefined, hostname ): mock_client = mock_client_response(ex_undefined) with pytest.raises(ResponseException): await mock_client.request(mocker.MagicMock(), host=hostname)
import pytest from aiospamc.client import Client from aiospamc.exceptions import ( BadResponse, UsageException, DataErrorException, NoInputException, NoUserException, NoHostException, UnavailableException, InternalSoftwareException, OSErrorException, OSFileException, CantCreateException, IOErrorException, TemporaryFailureException, ProtocolException, NoPermissionException, ConfigException, ServerTimeoutException, ResponseException, ) from aiospamc.responses import Response async def test_request_sent_to_connection(mock_client_dependency, mocker, hostname): mock_req = mocker.MagicMock() await mock_client_dependency.request(mock_req, host=hostname) assert ( bytes(mock_req) == mock_client_dependency.connection_factory().request.await_args[0][0] ) async def test_request_response_sent_to_parser( mock_client_dependency, mocker, hostname ): mock_req = mocker.MagicMock() connection = mock_client_dependency.connection_factory() parser = mock_client_dependency.parser_factory() mocker.spy(parser, "parse") await mock_client_dependency.request(mock_req, host=hostname) response = connection.request.return_value assert response == parser.parse.call_args[0][0] async def test_request_returns_response(mock_client_dependency, mocker, hostname): mock_req = mocker.MagicMock() connection = mock_client_dependency.connection_factory() parser = mock_client_dependency.parser_factory() parse_spy = mocker.spy(parser, "parse") result = await mock_client_dependency.request(mock_req, host=hostname) expected = Response(**parse_spy.spy_return) assert expected == result async def test_request_raises_usage(mock_client_response, mocker, ex_usage, hostname): mock_client = mock_client_response(ex_usage) with pytest.raises(UsageException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_data_err( mock_client_response, mocker, ex_data_err, hostname ): mock_client = mock_client_response(ex_data_err) with pytest.raises(DataErrorException): await 
mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_no_input( mock_client_response, mocker, ex_no_input, hostname ): mock_client = mock_client_response(ex_no_input) with pytest.raises(NoInputException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_no_user( mock_client_response, mocker, ex_no_user, hostname ): mock_client = mock_client_response(ex_no_user) with pytest.raises(NoUserException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_no_host( mock_client_response, mocker, ex_no_host, hostname ): mock_client = mock_client_response(ex_no_host) with pytest.raises(NoHostException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_unavailable( mock_client_response, mocker, ex_unavailable, hostname ): mock_client = mock_client_response(ex_unavailable) with pytest.raises(UnavailableException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_software( mock_client_response, mocker, ex_software, hostname ): mock_client = mock_client_response(ex_software) with pytest.raises(InternalSoftwareException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_os_error( mock_client_response, mocker, ex_os_err, hostname ): mock_client = mock_client_response(ex_os_err) with pytest.raises(OSErrorException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_os_file( mock_client_response, mocker, ex_os_file, hostname ): mock_client = mock_client_response(ex_os_file) with pytest.raises(OSFileException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_cant_create( mock_client_response, mocker, ex_cant_create, hostname ): mock_client = mock_client_response(ex_cant_create) with pytest.raises(CantCreateException): await mock_client.request(mocker.MagicMock(), 
host=hostname) async def test_request_raises_io_error( mock_client_response, mocker, ex_io_err, hostname ): mock_client = mock_client_response(ex_io_err) with pytest.raises(IOErrorException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_temporary_failure( mock_client_response, mocker, ex_temp_fail, hostname ): mock_client = mock_client_response(ex_temp_fail) with pytest.raises(TemporaryFailureException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_protocol( mock_client_response, mocker, ex_protocol, hostname ): mock_client = mock_client_response(ex_protocol) with pytest.raises(ProtocolException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_no_permission( mock_client_response, mocker, ex_no_perm, hostname ): mock_client = mock_client_response(ex_no_perm) with pytest.raises(NoPermissionException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_config(mock_client_response, mocker, ex_config, hostname): mock_client = mock_client_response(ex_config) with pytest.raises(ConfigException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_timeout( mock_client_response, mocker, ex_timeout, hostname ): mock_client = mock_client_response(ex_timeout) with pytest.raises(ServerTimeoutException): await mock_client.request(mocker.MagicMock(), host=hostname) async def test_request_raises_undefined( mock_client_response, mocker, ex_undefined, hostname ): mock_client = mock_client_response(ex_undefined) with pytest.raises(ResponseException): await mock_client.request(mocker.MagicMock(), host=hostname)
none
1
2.16609
2
sunpy/conftest.py
tacaswell/sunpy
0
9857
<reponame>tacaswell/sunpy import os import tempfile import importlib import pytest import astropy import astropy.config.paths # Force MPL to use non-gui backends for testing. try: import matplotlib except ImportError: pass else: matplotlib.use('Agg') # Don't actually import pytest_remotedata because that can do things to the # entrypoints code in pytest. remotedata_spec = importlib.util.find_spec("pytest_remotedata") HAVE_REMOTEDATA = remotedata_spec is not None # Do not collect the sample data file because this would download the sample data. collect_ignore = ["data/sample.py"] @pytest.fixture(scope='session', autouse=True) def tmp_config_dir(request): """ Globally set the default config for all tests. """ tmpdir = tempfile.TemporaryDirectory() os.environ["SUNPY_CONFIGDIR"] = str(tmpdir.name) astropy.config.paths.set_temp_config._temp_path = str(tmpdir.name) astropy.config.paths.set_temp_cache._temp_path = str(tmpdir.name) yield del os.environ["SUNPY_CONFIGDIR"] tmpdir.cleanup() astropy.config.paths.set_temp_config._temp_path = None astropy.config.paths.set_temp_cache._temp_path = None @pytest.fixture() def sunpy_cache(mocker, tmp_path): """ Provide a way to add local files to the cache. This can be useful when mocking remote requests. """ from types import MethodType from sunpy.data.data_manager.cache import Cache from sunpy.data.data_manager.downloader import ParfiveDownloader from sunpy.data.data_manager.storage import InMemStorage cache = Cache( ParfiveDownloader(), InMemStorage(), tmp_path, None ) def add(self, url, path): self._storage.store({ 'url': url, 'file_path': path, 'file_hash': 'none', # hash doesn't matter }) cache.add = MethodType(add, cache) def func(mocked): mocker.patch(mocked, cache) return cache yield func @pytest.fixture() def undo_config_dir_patch(): """ Provide a way for certain tests to not have the config dir. 
""" oridir = os.environ["SUNPY_CONFIGDIR"] del os.environ["SUNPY_CONFIGDIR"] yield os.environ["SUNPY_CONFIGDIR"] = oridir @pytest.fixture(scope='session', autouse=True) def hide_parfive_progress(request): """ Globally set the HIDE_PARFIVE_PROGESS to hide the parfive progress bar in tests. Used by the parfive helper class only. """ os.environ["HIDE_PARFIVE_PROGESS"] = "True" yield del os.environ["HIDE_PARFIVE_PROGESS"] @pytest.fixture(scope='session', autouse=True) def tmp_dl_dir(request): """ Globally set the default download directory for the test run to a tmp dir. """ with tempfile.TemporaryDirectory() as tmpdir: os.environ["SUNPY_DOWNLOADDIR"] = tmpdir yield tmpdir del os.environ["SUNPY_DOWNLOADDIR"] @pytest.fixture() def undo_download_dir_patch(): """ Provide a way for certain tests to not have tmp download dir. """ oridir = os.environ["SUNPY_DOWNLOADDIR"] del os.environ["SUNPY_DOWNLOADDIR"] yield os.environ["SUNPY_DOWNLOADDIR"] = oridir def pytest_runtest_setup(item): """ pytest hook to skip all tests that have the mark 'remotedata' if the pytest_remotedata plugin is not installed. """ if isinstance(item, pytest.Function): if 'remote_data' in item.keywords and not HAVE_REMOTEDATA: pytest.skip("skipping remotedata tests as pytest-remotedata is not installed")
import os import tempfile import importlib import pytest import astropy import astropy.config.paths # Force MPL to use non-gui backends for testing. try: import matplotlib except ImportError: pass else: matplotlib.use('Agg') # Don't actually import pytest_remotedata because that can do things to the # entrypoints code in pytest. remotedata_spec = importlib.util.find_spec("pytest_remotedata") HAVE_REMOTEDATA = remotedata_spec is not None # Do not collect the sample data file because this would download the sample data. collect_ignore = ["data/sample.py"] @pytest.fixture(scope='session', autouse=True) def tmp_config_dir(request): """ Globally set the default config for all tests. """ tmpdir = tempfile.TemporaryDirectory() os.environ["SUNPY_CONFIGDIR"] = str(tmpdir.name) astropy.config.paths.set_temp_config._temp_path = str(tmpdir.name) astropy.config.paths.set_temp_cache._temp_path = str(tmpdir.name) yield del os.environ["SUNPY_CONFIGDIR"] tmpdir.cleanup() astropy.config.paths.set_temp_config._temp_path = None astropy.config.paths.set_temp_cache._temp_path = None @pytest.fixture() def sunpy_cache(mocker, tmp_path): """ Provide a way to add local files to the cache. This can be useful when mocking remote requests. """ from types import MethodType from sunpy.data.data_manager.cache import Cache from sunpy.data.data_manager.downloader import ParfiveDownloader from sunpy.data.data_manager.storage import InMemStorage cache = Cache( ParfiveDownloader(), InMemStorage(), tmp_path, None ) def add(self, url, path): self._storage.store({ 'url': url, 'file_path': path, 'file_hash': 'none', # hash doesn't matter }) cache.add = MethodType(add, cache) def func(mocked): mocker.patch(mocked, cache) return cache yield func @pytest.fixture() def undo_config_dir_patch(): """ Provide a way for certain tests to not have the config dir. 
""" oridir = os.environ["SUNPY_CONFIGDIR"] del os.environ["SUNPY_CONFIGDIR"] yield os.environ["SUNPY_CONFIGDIR"] = oridir @pytest.fixture(scope='session', autouse=True) def hide_parfive_progress(request): """ Globally set the HIDE_PARFIVE_PROGESS to hide the parfive progress bar in tests. Used by the parfive helper class only. """ os.environ["HIDE_PARFIVE_PROGESS"] = "True" yield del os.environ["HIDE_PARFIVE_PROGESS"] @pytest.fixture(scope='session', autouse=True) def tmp_dl_dir(request): """ Globally set the default download directory for the test run to a tmp dir. """ with tempfile.TemporaryDirectory() as tmpdir: os.environ["SUNPY_DOWNLOADDIR"] = tmpdir yield tmpdir del os.environ["SUNPY_DOWNLOADDIR"] @pytest.fixture() def undo_download_dir_patch(): """ Provide a way for certain tests to not have tmp download dir. """ oridir = os.environ["SUNPY_DOWNLOADDIR"] del os.environ["SUNPY_DOWNLOADDIR"] yield os.environ["SUNPY_DOWNLOADDIR"] = oridir def pytest_runtest_setup(item): """ pytest hook to skip all tests that have the mark 'remotedata' if the pytest_remotedata plugin is not installed. """ if isinstance(item, pytest.Function): if 'remote_data' in item.keywords and not HAVE_REMOTEDATA: pytest.skip("skipping remotedata tests as pytest-remotedata is not installed")
en
0.837634
# Force MPL to use non-gui backends for testing. # Don't actually import pytest_remotedata because that can do things to the # entrypoints code in pytest. # Do not collect the sample data file because this would download the sample data. Globally set the default config for all tests. Provide a way to add local files to the cache. This can be useful when mocking remote requests. # hash doesn't matter Provide a way for certain tests to not have the config dir. Globally set the HIDE_PARFIVE_PROGESS to hide the parfive progress bar in tests. Used by the parfive helper class only. Globally set the default download directory for the test run to a tmp dir. Provide a way for certain tests to not have tmp download dir. pytest hook to skip all tests that have the mark 'remotedata' if the pytest_remotedata plugin is not installed.
2.140099
2
qtcalendar/models.py
asmateus/PyQtCalendar
7
9858
''' Models for QtWidgets ''' from collections import deque from math import ceil import datetime as dt import calendar class EventInCalendar__Model: class Text: @staticmethod def getDefault(): return EventInCalendar__Model.Text() def __init__(self, event=None, overflow=False): if event is None: self.init_date = dt.datetime(1, 1, 1) self.end_date = dt.datetime(9999, 12, 31) self.place = Event__Model.Place() else: if overflow: self.init_date = dt.datetime.combine( event.getInitDate().date(), dt.time(0, 0, 0)) else: self.init_date = event.getInitDate() self.end_date = event.getEndDate() self.place = event.getPlace() def __str__(self): init_time, end_time = self.init_date.time(), self.end_date.time() return ' '.join([str(i) for i in [init_time, end_time, self.place]]) @staticmethod def colorOf(val): range_list = [ (0.0, 0.2, 'rgb(178, 0, 0)'), (0.2, 0.5, 'rgb(255, 40, 40)'), (0.5, 0.7, 'rgb(191, 165, 0)'), (0.7, 1.0, 'rgb(252, 224, 45)'), (1.0, 1.1, 'rgb(46, 234, 81)'), ] for lw, hi, c in range_list: if lw <= val and hi > val: return c def __init__(self, master, overflow): self._fulfillment = 0.0 self._overflow = overflow self._master = master self._event = None def getFulFillmentStatus(self, numeric=False): if not numeric: return EventInCalendar__Model.colorOf(self._fulfillment) return self._fulfillment def setEvent(self, event): self._event = event.getModel() self._fulfillment = self._event.getFulFillmentStatus() def __str__(self): if self._event is None: return EventInCalendar__Model.Text().__str__() return EventInCalendar__Model.Text(self._event, self._overflow).__str__() class Event__Model: class Place: def __init__(self, name='NA', people=0): self.name = name self.people = people def __str__(self): return self.name def __init__(self, init_date, end_date, place, fulfillment=0.0): self._init_date = init_date self._end_date = end_date self._place = place self._fulfillment = fulfillment def getFulFillmentStatus(self): return self._fulfillment def getInitDate(self): 
return self._init_date def getEndDate(self): return self._end_date def getPlace(self): return self._place class Date__Model: TYPE_WEEKDAY = 0 TYPE_WEEKEND = 1 TYPE_HOLYDAY = 2 TYPE_FREEDAY = 3 TYPE_GRAYDAY = 4 @staticmethod def colorOf(val): color_list = [ (Date__Model.TYPE_WEEKDAY, (219, 219, 219)), (Date__Model.TYPE_WEEKEND, (183, 183, 183)), (Date__Model.TYPE_HOLYDAY, (183, 183, 183)), (Date__Model.TYPE_FREEDAY, (0, 216, 255)), (Date__Model.TYPE_GRAYDAY, (255, 255, 255)), ] for d, c in color_list: if d == val: return c return color_list[0][1] def __init__(self, master, date): self._master = master self._events = list() self._date = date self._date_type = Date__Model.TYPE_WEEKDAY def setDate(self, date, datetype=TYPE_WEEKDAY): self._date = date self._date_type = datetype def getDate(self): return self._date def getDateType(self, numeric=False): if numeric is False: return Date__Model.colorOf(self._date_type) return self._date_type def addEvent(self, event): self._events.append(event) def getEvents(self): return self._events class Calendar__Model: TYPE_MONDAY_LEADING = 0 TYPE_TUESDAY_LEADING = 1 TYPE_WEDNESDAY_LEADING = 2 TYPE_THURSDAY_LEADING = 3 TYPE_FRIDAY_LEADING = 4 TYPE_SATURDAY_LEADING = 5 TYPE_SUNDAY_LEADING = 6 MAX_DIM_X = 7 MAX_DIM_Y = 6 WEEKENDS = [5, 6] @staticmethod def dayOf(date, init, datatree): ''' Returns the day of the week of a given date and the position of that day in the calendar grid. The returned text value of the day is recovered from the stringer module. 
''' days = datatree['str']['days'] # Get the day of the week of the selected date datetuple = tuple([int(s) for s in str(date).split(' ')[0].split('-')]) day = days[list(zip(*days))[0].index(calendar.weekday(*datetuple))][1] # Horizontal position in the grid is deduced from the selected leading day days_dq = deque(days) days_dq.rotate(7 - init) pos_x = list(zip(*days_dq))[0].index(calendar.weekday(*datetuple)) # Vertical position is deduced from the selected leading day and the # day of the first date of that month firstmonthday = (datetuple[0], datetuple[1], 1) fday = list(zip(*days_dq))[0].index(calendar.weekday(*firstmonthday)) pos_y = ceil((fday + date.day) / 7) - 1 # Return the place in the calendar grid depending on the offset return day, pos_x, pos_y def __init__(self, master, ctype=TYPE_SUNDAY_LEADING, holidays=list()): ''' Calendar constructor, a calendar is an array of dates that should always be full, thus, initialy an array of empty dates (6x7), is array is called holders; a second empty array of dates is created and will replace eventually the dates of the respective holder date. 
Both arrays are validated through a snapshot array, the snapshot refers to the dates that fill the Calendar grid for a current month, be those dates from the actual month or the adjacent months ''' self._master = master self._type = ctype self._holidays = holidays # Assume month as current month self._month = tuple([dt.date.today().year, dt.date.today().month]) # Generate the snapshot for the current month self._snapshot = self.generateSnapshot() # Create empty dates from the snapshot self._dates = self.generateDefaultDates() def generateSnapshot(self): rt = list() if self._month is None: return rt # First day of month first_day = dt.date(self._month[0], self._month[1], 1) # Find day of first position in calendar grid offset = Calendar__Model.dayOf(first_day, self._type, self._master.getDataTree())[1] first_day -= dt.timedelta(offset) # Once first position is encountered, fill the holder array for i in range(Calendar__Model.MAX_DIM_X * Calendar__Model.MAX_DIM_Y): rt.append(first_day) first_day += dt.timedelta(1) return rt def generateDefaultDates(self): rt = list() for date in self._snapshot: created_date = self._master.createDate(date) self.setDateType(created_date) rt.append(created_date) return rt def addDate(self, date): if self._month is not None: if date.getModel().getDate() in self._snapshot: index = self._snapshot.index(date.getModel().getDate()) self.setDateType(date) self._dates[index] = date def addEventInCalendar(self, date, eic): if self._month is not None: if date in self._snapshot: index = self._snapshot.index(date) self._dates[index].addCalendarEvent(eic) def setDateType(self, date): current_type = date.getModel().getDateType(numeric=True) deduced_type = Date__Model.TYPE_WEEKDAY dt_date = date.getModel().getDate() dt_tuple = (dt_date.year, dt_date.month, dt_date.day) if calendar.weekday(*dt_tuple) in Calendar__Model.WEEKENDS: deduced_type = Date__Model.TYPE_WEEKEND if dt_date in self._holidays: deduced_type = Date__Model.TYPE_HOLYDAY if 
(dt_date.year, dt_date.month) != self._month: deduced_type = Date__Model.TYPE_GRAYDAY if current_type < deduced_type: current_type = deduced_type date.changeDateType(current_type) def _update(self): self._snapshot = self.generateSnapshot() self._dates = self.generateDefaultDates() # Add the required events events = self._master.getEvents() events_to_add = list() for event in events: if event.getModel().getInitDate().date() in self._snapshot: events_to_add.append(event) self._master.createEvents(events_to_add) def setMonth(self, month): self._month = month self._update() def getMonth(self): return self._month def monthSubtract(self): month = self._month if month[1] == 1: if month[0] == 1: return month else: return (month[0] - 1, 12) else: return (month[0], month[1] - 1) def monthAdd(self): month = self._month if month[1] == 12: if month[0] == 9999: return month else: return (month[0] + 1, 1) else: return (month[0], month[1] + 1) def setDataTree(self, datatree): self._datatree = datatree self._update() def getDataTree(self): return self._datatree def posInSnapshot(self, date): i = self._snapshot.index(date) return ceil((i + 1) / 7) - 1, (i) % 7 def getHolderDimensions(self): return Calendar__Model.MAX_DIM_X, Calendar__Model.MAX_DIM_Y def getDates(self): return self._dates def getType(self): return self._type
''' Models for QtWidgets ''' from collections import deque from math import ceil import datetime as dt import calendar class EventInCalendar__Model: class Text: @staticmethod def getDefault(): return EventInCalendar__Model.Text() def __init__(self, event=None, overflow=False): if event is None: self.init_date = dt.datetime(1, 1, 1) self.end_date = dt.datetime(9999, 12, 31) self.place = Event__Model.Place() else: if overflow: self.init_date = dt.datetime.combine( event.getInitDate().date(), dt.time(0, 0, 0)) else: self.init_date = event.getInitDate() self.end_date = event.getEndDate() self.place = event.getPlace() def __str__(self): init_time, end_time = self.init_date.time(), self.end_date.time() return ' '.join([str(i) for i in [init_time, end_time, self.place]]) @staticmethod def colorOf(val): range_list = [ (0.0, 0.2, 'rgb(178, 0, 0)'), (0.2, 0.5, 'rgb(255, 40, 40)'), (0.5, 0.7, 'rgb(191, 165, 0)'), (0.7, 1.0, 'rgb(252, 224, 45)'), (1.0, 1.1, 'rgb(46, 234, 81)'), ] for lw, hi, c in range_list: if lw <= val and hi > val: return c def __init__(self, master, overflow): self._fulfillment = 0.0 self._overflow = overflow self._master = master self._event = None def getFulFillmentStatus(self, numeric=False): if not numeric: return EventInCalendar__Model.colorOf(self._fulfillment) return self._fulfillment def setEvent(self, event): self._event = event.getModel() self._fulfillment = self._event.getFulFillmentStatus() def __str__(self): if self._event is None: return EventInCalendar__Model.Text().__str__() return EventInCalendar__Model.Text(self._event, self._overflow).__str__() class Event__Model: class Place: def __init__(self, name='NA', people=0): self.name = name self.people = people def __str__(self): return self.name def __init__(self, init_date, end_date, place, fulfillment=0.0): self._init_date = init_date self._end_date = end_date self._place = place self._fulfillment = fulfillment def getFulFillmentStatus(self): return self._fulfillment def getInitDate(self): 
return self._init_date def getEndDate(self): return self._end_date def getPlace(self): return self._place class Date__Model: TYPE_WEEKDAY = 0 TYPE_WEEKEND = 1 TYPE_HOLYDAY = 2 TYPE_FREEDAY = 3 TYPE_GRAYDAY = 4 @staticmethod def colorOf(val): color_list = [ (Date__Model.TYPE_WEEKDAY, (219, 219, 219)), (Date__Model.TYPE_WEEKEND, (183, 183, 183)), (Date__Model.TYPE_HOLYDAY, (183, 183, 183)), (Date__Model.TYPE_FREEDAY, (0, 216, 255)), (Date__Model.TYPE_GRAYDAY, (255, 255, 255)), ] for d, c in color_list: if d == val: return c return color_list[0][1] def __init__(self, master, date): self._master = master self._events = list() self._date = date self._date_type = Date__Model.TYPE_WEEKDAY def setDate(self, date, datetype=TYPE_WEEKDAY): self._date = date self._date_type = datetype def getDate(self): return self._date def getDateType(self, numeric=False): if numeric is False: return Date__Model.colorOf(self._date_type) return self._date_type def addEvent(self, event): self._events.append(event) def getEvents(self): return self._events class Calendar__Model: TYPE_MONDAY_LEADING = 0 TYPE_TUESDAY_LEADING = 1 TYPE_WEDNESDAY_LEADING = 2 TYPE_THURSDAY_LEADING = 3 TYPE_FRIDAY_LEADING = 4 TYPE_SATURDAY_LEADING = 5 TYPE_SUNDAY_LEADING = 6 MAX_DIM_X = 7 MAX_DIM_Y = 6 WEEKENDS = [5, 6] @staticmethod def dayOf(date, init, datatree): ''' Returns the day of the week of a given date and the position of that day in the calendar grid. The returned text value of the day is recovered from the stringer module. 
''' days = datatree['str']['days'] # Get the day of the week of the selected date datetuple = tuple([int(s) for s in str(date).split(' ')[0].split('-')]) day = days[list(zip(*days))[0].index(calendar.weekday(*datetuple))][1] # Horizontal position in the grid is deduced from the selected leading day days_dq = deque(days) days_dq.rotate(7 - init) pos_x = list(zip(*days_dq))[0].index(calendar.weekday(*datetuple)) # Vertical position is deduced from the selected leading day and the # day of the first date of that month firstmonthday = (datetuple[0], datetuple[1], 1) fday = list(zip(*days_dq))[0].index(calendar.weekday(*firstmonthday)) pos_y = ceil((fday + date.day) / 7) - 1 # Return the place in the calendar grid depending on the offset return day, pos_x, pos_y def __init__(self, master, ctype=TYPE_SUNDAY_LEADING, holidays=list()): ''' Calendar constructor, a calendar is an array of dates that should always be full, thus, initialy an array of empty dates (6x7), is array is called holders; a second empty array of dates is created and will replace eventually the dates of the respective holder date. 
Both arrays are validated through a snapshot array, the snapshot refers to the dates that fill the Calendar grid for a current month, be those dates from the actual month or the adjacent months ''' self._master = master self._type = ctype self._holidays = holidays # Assume month as current month self._month = tuple([dt.date.today().year, dt.date.today().month]) # Generate the snapshot for the current month self._snapshot = self.generateSnapshot() # Create empty dates from the snapshot self._dates = self.generateDefaultDates() def generateSnapshot(self): rt = list() if self._month is None: return rt # First day of month first_day = dt.date(self._month[0], self._month[1], 1) # Find day of first position in calendar grid offset = Calendar__Model.dayOf(first_day, self._type, self._master.getDataTree())[1] first_day -= dt.timedelta(offset) # Once first position is encountered, fill the holder array for i in range(Calendar__Model.MAX_DIM_X * Calendar__Model.MAX_DIM_Y): rt.append(first_day) first_day += dt.timedelta(1) return rt def generateDefaultDates(self): rt = list() for date in self._snapshot: created_date = self._master.createDate(date) self.setDateType(created_date) rt.append(created_date) return rt def addDate(self, date): if self._month is not None: if date.getModel().getDate() in self._snapshot: index = self._snapshot.index(date.getModel().getDate()) self.setDateType(date) self._dates[index] = date def addEventInCalendar(self, date, eic): if self._month is not None: if date in self._snapshot: index = self._snapshot.index(date) self._dates[index].addCalendarEvent(eic) def setDateType(self, date): current_type = date.getModel().getDateType(numeric=True) deduced_type = Date__Model.TYPE_WEEKDAY dt_date = date.getModel().getDate() dt_tuple = (dt_date.year, dt_date.month, dt_date.day) if calendar.weekday(*dt_tuple) in Calendar__Model.WEEKENDS: deduced_type = Date__Model.TYPE_WEEKEND if dt_date in self._holidays: deduced_type = Date__Model.TYPE_HOLYDAY if 
(dt_date.year, dt_date.month) != self._month: deduced_type = Date__Model.TYPE_GRAYDAY if current_type < deduced_type: current_type = deduced_type date.changeDateType(current_type) def _update(self): self._snapshot = self.generateSnapshot() self._dates = self.generateDefaultDates() # Add the required events events = self._master.getEvents() events_to_add = list() for event in events: if event.getModel().getInitDate().date() in self._snapshot: events_to_add.append(event) self._master.createEvents(events_to_add) def setMonth(self, month): self._month = month self._update() def getMonth(self): return self._month def monthSubtract(self): month = self._month if month[1] == 1: if month[0] == 1: return month else: return (month[0] - 1, 12) else: return (month[0], month[1] - 1) def monthAdd(self): month = self._month if month[1] == 12: if month[0] == 9999: return month else: return (month[0] + 1, 1) else: return (month[0], month[1] + 1) def setDataTree(self, datatree): self._datatree = datatree self._update() def getDataTree(self): return self._datatree def posInSnapshot(self, date): i = self._snapshot.index(date) return ceil((i + 1) / 7) - 1, (i) % 7 def getHolderDimensions(self): return Calendar__Model.MAX_DIM_X, Calendar__Model.MAX_DIM_Y def getDates(self): return self._dates def getType(self): return self._type
en
0.864603
Models for QtWidgets Returns the day of the week of a given date and the position of that day in the calendar grid. The returned text value of the day is recovered from the stringer module. # Get the day of the week of the selected date # Horizontal position in the grid is deduced from the selected leading day # Vertical position is deduced from the selected leading day and the # day of the first date of that month # Return the place in the calendar grid depending on the offset Calendar constructor, a calendar is an array of dates that should always be full, thus, initialy an array of empty dates (6x7), is array is called holders; a second empty array of dates is created and will replace eventually the dates of the respective holder date. Both arrays are validated through a snapshot array, the snapshot refers to the dates that fill the Calendar grid for a current month, be those dates from the actual month or the adjacent months # Assume month as current month # Generate the snapshot for the current month # Create empty dates from the snapshot # First day of month # Find day of first position in calendar grid # Once first position is encountered, fill the holder array # Add the required events
2.720298
3
python/orca/src/bigdl/orca/data/tf/data.py
Forest216/BigDL
0
9859
# # Copyright 2016 The BigDL Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import tensorflow as tf from bigdl.orca.tfpark.tf_dataset import TensorMeta from bigdl.dllib.utils import nest from bigdl.orca.data import SparkXShards from bigdl.dllib.utils import log4Error class Dataset(object): """ Represents a distributed set of elements backed by an RDD, which is created by applying tensorflow dataset transformations on each partitions. """ def __init__(self, xshards, create_dataset_fn): self.xshards = xshards self.create_dataset_fn = create_dataset_fn def as_graph_rdd(self, batch_per_shard, drop_remainder=True): create_dataset_fn = self.create_dataset_fn def to_dataset(iter): data_list = list(iter) import tensorflow as tf if not data_list: return [] datasets = [create_dataset_fn(data) for data in data_list] from functools import reduce dataset = reduce(lambda x, y: x.concatenate(y), datasets) dataset = dataset.batch(batch_per_shard, drop_remainder) iterator = dataset.make_initializable_iterator() train_next_ops = nest.flatten(iterator.get_next()) output_types = [t for t in nest.flatten(dataset.output_types)] output_types_enum = [t.as_datatype_enum for t in output_types] init_op_name = iterator.initializer.name table_init_op = tf.tables_initializer().name output_names = [op.name for op in train_next_ops] graph = train_next_ops[0].graph flatten_shapes = nest.flatten(dataset.output_shapes) flatten_shapes = [shape[1:] for shape in flatten_shapes] flatten_tensor_structure = 
[TensorMeta(dtype=output_types[i], shape=list(flatten_shapes[i]), name="zoo_input_{}".format(i)) for i in range(len(flatten_shapes))] structure = dataset.output_types if isinstance(structure, tf.DType): structure = (structure,) tensor_structure = nest.pack_sequence_as(structure, flatten_tensor_structure) meta_info = { "init_op_name": init_op_name, "table_init_op": table_init_op, "output_names": output_names, "output_types": output_types_enum, "tensor_structure": tensor_structure } return [(bytearray(graph.as_graph_def().SerializeToString()), meta_info)] graph_rdd_and_meta = self.xshards.rdd.mapPartitions(to_dataset) return graph_rdd_and_meta def as_tf_dataset_rdd(self): create_dataset_fn = self.create_dataset_fn def to_dataset(iter): data_list = list(iter) if not data_list: return [] from tensorflow.python.distribute.coordinator.values import serialize_dataset_to_graph datasets = [create_dataset_fn(data) for data in data_list] from functools import reduce dataset = reduce(lambda x, y: x.concatenate(y), datasets) ds_def = serialize_dataset_to_graph(dataset).numpy() elem_spec = dataset.element_spec return [{"ds_def": ds_def, "elem_spec": elem_spec}] tf_dataset_rdd = self.xshards.rdd.mapPartitions(to_dataset) return tf_dataset_rdd @staticmethod def from_tensor_slices(xshards): return TensorSliceDataset(xshards) @staticmethod def from_feature_table(tbl): from bigdl.friesian.feature import FeatureTable from bigdl.friesian.feature.utils import featuretable_to_xshards log4Error.invalidInputError(isinstance(tbl, FeatureTable), "Only Friesian FeatureTable is supported") xshards = featuretable_to_xshards(tbl) return TensorSliceDataset(xshards) def map(self, map_func): return MapDataset(self, map_func) class TensorSliceDataset(Dataset): def __init__(self, xshards): assert isinstance(xshards, SparkXShards), \ "only datasets backed by a SparkXShards are supported" self.xshards = xshards def create_dataset_fn(data): return tf.data.Dataset.from_tensor_slices(data) 
super().__init__(xshards, create_dataset_fn) class MapDataset(Dataset): def __init__(self, input_dataset, map_func): create_pre_dataset_fn = input_dataset.create_dataset_fn def create_dataset_fn(data): dataset = create_pre_dataset_fn(data) return dataset.map(map_func) super().__init__(xshards=input_dataset.xshards, create_dataset_fn=create_dataset_fn)
# # Copyright 2016 The BigDL Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import tensorflow as tf from bigdl.orca.tfpark.tf_dataset import TensorMeta from bigdl.dllib.utils import nest from bigdl.orca.data import SparkXShards from bigdl.dllib.utils import log4Error class Dataset(object): """ Represents a distributed set of elements backed by an RDD, which is created by applying tensorflow dataset transformations on each partitions. """ def __init__(self, xshards, create_dataset_fn): self.xshards = xshards self.create_dataset_fn = create_dataset_fn def as_graph_rdd(self, batch_per_shard, drop_remainder=True): create_dataset_fn = self.create_dataset_fn def to_dataset(iter): data_list = list(iter) import tensorflow as tf if not data_list: return [] datasets = [create_dataset_fn(data) for data in data_list] from functools import reduce dataset = reduce(lambda x, y: x.concatenate(y), datasets) dataset = dataset.batch(batch_per_shard, drop_remainder) iterator = dataset.make_initializable_iterator() train_next_ops = nest.flatten(iterator.get_next()) output_types = [t for t in nest.flatten(dataset.output_types)] output_types_enum = [t.as_datatype_enum for t in output_types] init_op_name = iterator.initializer.name table_init_op = tf.tables_initializer().name output_names = [op.name for op in train_next_ops] graph = train_next_ops[0].graph flatten_shapes = nest.flatten(dataset.output_shapes) flatten_shapes = [shape[1:] for shape in flatten_shapes] flatten_tensor_structure = 
[TensorMeta(dtype=output_types[i], shape=list(flatten_shapes[i]), name="zoo_input_{}".format(i)) for i in range(len(flatten_shapes))] structure = dataset.output_types if isinstance(structure, tf.DType): structure = (structure,) tensor_structure = nest.pack_sequence_as(structure, flatten_tensor_structure) meta_info = { "init_op_name": init_op_name, "table_init_op": table_init_op, "output_names": output_names, "output_types": output_types_enum, "tensor_structure": tensor_structure } return [(bytearray(graph.as_graph_def().SerializeToString()), meta_info)] graph_rdd_and_meta = self.xshards.rdd.mapPartitions(to_dataset) return graph_rdd_and_meta def as_tf_dataset_rdd(self): create_dataset_fn = self.create_dataset_fn def to_dataset(iter): data_list = list(iter) if not data_list: return [] from tensorflow.python.distribute.coordinator.values import serialize_dataset_to_graph datasets = [create_dataset_fn(data) for data in data_list] from functools import reduce dataset = reduce(lambda x, y: x.concatenate(y), datasets) ds_def = serialize_dataset_to_graph(dataset).numpy() elem_spec = dataset.element_spec return [{"ds_def": ds_def, "elem_spec": elem_spec}] tf_dataset_rdd = self.xshards.rdd.mapPartitions(to_dataset) return tf_dataset_rdd @staticmethod def from_tensor_slices(xshards): return TensorSliceDataset(xshards) @staticmethod def from_feature_table(tbl): from bigdl.friesian.feature import FeatureTable from bigdl.friesian.feature.utils import featuretable_to_xshards log4Error.invalidInputError(isinstance(tbl, FeatureTable), "Only Friesian FeatureTable is supported") xshards = featuretable_to_xshards(tbl) return TensorSliceDataset(xshards) def map(self, map_func): return MapDataset(self, map_func) class TensorSliceDataset(Dataset): def __init__(self, xshards): assert isinstance(xshards, SparkXShards), \ "only datasets backed by a SparkXShards are supported" self.xshards = xshards def create_dataset_fn(data): return tf.data.Dataset.from_tensor_slices(data) 
super().__init__(xshards, create_dataset_fn) class MapDataset(Dataset): def __init__(self, input_dataset, map_func): create_pre_dataset_fn = input_dataset.create_dataset_fn def create_dataset_fn(data): dataset = create_pre_dataset_fn(data) return dataset.map(map_func) super().__init__(xshards=input_dataset.xshards, create_dataset_fn=create_dataset_fn)
en
0.882935
# # Copyright 2016 The BigDL Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Represents a distributed set of elements backed by an RDD, which is created by applying tensorflow dataset transformations on each partitions.
2.094136
2
tools/generate_serialization_header.py
StableCoder/vulkan-mini-libs-2
1
9860
<gh_stars>1-10 #!/usr/bin/env python3 import sys import getopt import xml.etree.ElementTree as ET def processVendors(outFile, vendors): outFile.writelines(["\nconstexpr std::array<std::string_view, ", str( len(vendors)), "> vendors = {{\n"]) for vendor in vendors: outFile.writelines([' \"', vendor.tag, '\",\n']) outFile.write('}};\n') def processEnumValue(outFile, enum, value): if not value.get('value') is None: # Spitting out plain values outFile.write(value.get('value')) elif not value.get('bitpos') is None: # Bitflag outFile.writelines( ['0x', format(1 << int(value.get('bitpos')), '08X')]) elif not value.get('alias') is None: processEnumValue(outFile, enum, enum.find(value.get('alias'))) def processEnums(outFile, enums, vendors, first, last): for enum in enums: # Skip VkResult if enum.tag == 'VkResult': continue # Skip if there's no values, MSVC can't do zero-sized arrays if len(enum.findall('./')) == 0: continue outFile.writelines( ['\nconstexpr EnumValueSet ', enum.tag, 'Sets[] = {\n']) # Determine how much to chop off the front strName = enum.tag typeDigit = '' # Determine if type ends with vendor tag vendorName = '' for vendor in vendors: if strName.endswith(vendor.tag): vendorName = vendor.tag strName = strName[:-len(vendorName)] if strName[-1].isdigit(): typeDigit = strName[-1] strName = strName[:-1] if strName.endswith('FlagBits'): strName = strName[:-8] # Construct most likely enum prefix mainPrefix = '' for char in strName: if mainPrefix == '': mainPrefix += char elif char.isupper(): mainPrefix += '_' mainPrefix += char.upper() else: mainPrefix += char.upper() mainPrefix += '_' if typeDigit != '': mainPrefix += typeDigit mainPrefix += '_' current = first while current <= last: for value in enum.findall('./'): if int(value.get('first')) != current: continue outFile.write(" {\"") valueStr = value.tag if valueStr.startswith(mainPrefix): valueStr = valueStr[len(mainPrefix):] if vendorName != '' and valueStr.endswith(vendorName): valueStr = 
valueStr[:-len(vendorName)-1] if valueStr.endswith('_BIT'): valueStr = valueStr[:-4] outFile.write(valueStr) outFile.write("\", ") processEnumValue(outFile, enum, value) outFile.write("},\n") current += 1 outFile.write('};\n') def main(argv): inputFile = '' outputFile = '' try: opts, args = getopt.getopt(argv, 'i:o:', []) except getopt.GetoptError: print('Error parsing options') sys.exit(1) for opt, arg in opts: if opt == '-i': inputFile = arg elif opt == '-o': outputFile = arg if(inputFile == ''): print("Error: No Vulkan XML file specified") sys.exit(1) if(outputFile == ''): print("Error: No output file specified") sys.exit(1) try: dataXml = ET.parse(inputFile) dataRoot = dataXml.getroot() except: print("Error: Could not open input file: ", inputFile) sys.exit(1) firstVersion = int(dataRoot.get('first')) lastVersion = int(dataRoot.get('last')) outFile = open(outputFile, "w") # Common Header with open("common_header.txt") as fd: outFile.write(fd.read()) outFile.write('\n') # outFile.write("""#ifndef VK_VALUE_SERIALIZATION_HPP #define VK_VALUE_SERIALIZATION_HPP /* USAGE: To use, include this header where the declarations for the boolean checks are required. On *ONE* compilation unit, include the definition of `#define VK_VALUE_SERIALIZATION_CONFIG_MAIN` so that the definitions are compiled somewhere following the one definition rule. 
*/ #include <vulkan/vulkan.h> #include <string> #include <string_view> """) # Static Asserts outFile.writelines(["\nstatic_assert(VK_HEADER_VERSION >= ", str( firstVersion), ", \"VK_HEADER_VERSION is from before the supported range.\");\n"]) outFile.writelines(["static_assert(VK_HEADER_VERSION <= ", str( lastVersion), ", \"VK_HEADER_VERSION is from after the supported range.\");\n"]) # Function Declarataions outFile.write(""" /** * @brief Macro that automatically stringifies the given Vulkan type for serialization * @param VKTYPE Actual Vulkan type * @param VALUE Value to be serialized * @param STRPTR Pointer to the string to store the serialization in. Only modified if true is * returned. * @return True if serialization was successful. False otherwise. */ #define VK_SERIALIZE(VKTYPE, VALUE, STRPTR) vk_serialize<VKTYPE>(#VKTYPE, VALUE, STRPTR) /** * @brief Macro that automatically stringifies the given Vulkan type for parsing * @param VKTYPE Actual Vulkan type * @param STRING String to be parsed * @param VALPTR Pointer to the value to store the parsed value in. Only modified if true is * returned. * @return True if serialization was successful. False otherwise. */ #define VK_PARSE(VKTYPE, STRING, VALPTR) vk_parse<VKTYPE>(#VKTYPE, STRING, VALPTR) /** * @brief Serializes a Vulkan enumerator/flag type (32-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkValue Value being serialized * @param pString Pointer to a string that will be modified with the serialized value. Only modified * if true is returned. * @return True the value was successfully serialized. False otherwise. */ bool vk_serialize(std::string_view vkType, uint32_t vkValue, std::string *pString); /** * @brief Parses a Vulkan enumerator/flag serialized string (32-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkString String being parsed * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if * true is returned. 
* @return True the value was successfully serialized. False otherwise. */ bool vk_parse(std::string_view vkType, std::string vkString, uint32_t *pValue); /** * @brief Serializes a Vulkan enumerator/flag type (64-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkValue Value being serialized * @param pString Pointer to a string that will be modified with the serialized value. Only modified * if true is returned. * @return True the value was successfully serialized. False otherwise. */ bool vk_serialize(std::string_view vkType, uint64_t vkValue, std::string *pString); /** * @brief Parses a Vulkan enumerator/flag serialized string (64-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkString String being parsed * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if * true is returned. * @return True the value was successfully serialized. False otherwise. */ bool vk_parse(std::string_view vkType, std::string vkString, uint64_t *pValue); /** * @brief Serializes a Vulkan enumerator/flag type * @tparam Vulkan type being serialized * @param vkType Name of the Vulkan enumerator/flag type * @param vkValue Value being serialized * @param pString Pointer to a string that will be modified with the serialized value. Only modified * if true is returned. * @return True the value was successfully serialized. False otherwise. */ template <typename T> bool vk_serialize(std::string_view vkType, T vkValue, std::string *pString) { return vk_serialize(vkType, static_cast<uint32_t>(vkValue), pString); } /** * @brief Parses a Vulkan enumerator/flag serialized string * @tparam Vulkan type being parsed * @param vkType Name of the Vulkan enumerator/flag type * @param vkString String being parsed * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if * true is returned. * @return True the value was successfully serialized. False otherwise. 
*/ template <typename T> bool vk_parse(std::string_view vkType, std::string vkString, T *pValue) { uint32_t retVal = 0; auto found = vk_parse(vkType, vkString, &retVal); if (found) { *pValue = static_cast<T>(retVal); } return found; } """) # Definition Start outFile.write("\n#ifdef VK_VALUE_SERIALIZATION_CONFIG_MAIN\n") outFile.write("\n#include <algorithm>\n") outFile.write("#include <array>\n") outFile.write("#include <cstring>\n") outFile.write("\nnamespace {\n") # Vendors vendors = dataRoot.findall('vendors/') processVendors(outFile, vendors) # EnumSet Declaration outFile.write("\nstruct EnumValueSet {\n") outFile.write(" std::string_view name;\n") outFile.write(" int64_t value;\n") outFile.write("};\n") # Enums enums = dataRoot.findall('enums/') processEnums(outFile, enums, vendors, firstVersion, lastVersion) # Enum Type Declaration outFile.write("\nstruct EnumType {\n") outFile.write(" std::string_view name;\n") outFile.write(" EnumValueSet const* data;\n") outFile.write(" uint32_t count;\n") outFile.write(" bool allowEmpty;\n") outFile.write("};\n") # Enum Pointer Array outFile.writelines(["\nconstexpr std::array<EnumType, ", str( len(enums)-1), "> enumTypes = {{\n"]) # -1 for not doing VkResult for enum in enums: if enum.tag == 'VkResult': continue valueCount = len(enum.findall('./')) if valueCount == 0: outFile.writelines( [" {\"", str(enum.tag), "\", nullptr, 0, true},\n"]) else: allowEmpty = "true" for enumVal in enum.findall('./'): if enumVal.get('first') == enum.get('first'): allowEmpty = "false" outFile.writelines([" {\"", str(enum.tag), "\", ", str( enum.tag), "Sets, ", str(valueCount), ", ", allowEmpty, "},\n"]) outFile.write('}};\n') # Function definitions outFile.write(""" /** * @brief Removes a vendor tag from the end of the given string view * @param view String view to remove the vendor tag from * @return A string_view without the vendor tag, if it was suffixed */ std::string_view stripVendor(std::string_view view) { for (auto const &it : 
vendors) { // Don't strip if it's all that's left if (view == it) break; if (strncmp(view.data() + view.size() - it.size(), it.data(), it.size()) == 0) { view = view.substr(0, view.size() - it.size()); break; } } return view; } /** * @brief Strips '_BIT' from the end of a string, if there */ std::string_view stripBit(std::string_view view) { if (view.size() > strlen("_BIT")) { if (view.substr(view.size() - strlen("_BIT")) == "_BIT") { return view.substr(0, view.size() - strlen("_BIT")); } } return view; } bool getEnumType(std::string_view vkType, EnumValueSet const **ppStart, EnumValueSet const **ppEnd, bool *pAllowEmpty) { // Check for a conversion from Flags -> FlagBits std::string localString; if (vkType.rfind("Flags") != std::string::npos) { localString = vkType; auto it = localString.rfind("Flags"); localString = localString.replace(it, strlen("Flags"), "FlagBits"); vkType = localString; } // Try the original name for (auto const &it : enumTypes) { if (vkType == std::string_view{it.name}) { *ppStart = it.data; *ppEnd = it.data + it.count; *pAllowEmpty = it.allowEmpty; return true; } } // Try a vendor-stripped name vkType = stripVendor(vkType); for (auto const &it : enumTypes) { if (vkType == std::string_view{it.name}) { *ppStart = it.data; *ppEnd = it.data + it.count; *pAllowEmpty = it.allowEmpty; return true; } } return false; } /** * @brief Converts a Vulkan Flag typename into the prefix that is used for it's enums * @param typeName Name of the type to generate the Vk enum prefix for * @return Generated prefix string * * Any capitalized letters except for the first has an underscore inserted before it, an underscore * is added to the end, and all characters are converted to upper case. * * It also removed the 'Flags' or 'FlagBits' suffixes. 
*/ std::string processEnumPrefix(std::string_view typeName) { // Flag Bits std::size_t flagBitsSize = strlen("FlagBits"); if (typeName.size() > flagBitsSize) { if (strncmp(typeName.data() + typeName.size() - flagBitsSize, "FlagBits", flagBitsSize) == 0) { typeName = typeName.substr(0, typeName.size() - strlen("FlagBits")); } } // Flags std::size_t flagsSize = strlen("Flags"); if (typeName.size() > flagsSize) { if (strncmp(typeName.data() + typeName.size() - flagsSize, "Flags", flagsSize) == 0) { typeName = typeName.substr(0, typeName.size() - strlen("Flags")); } } std::string retStr; for (auto it = typeName.begin(); it != typeName.end(); ++it) { if (it == typeName.begin()) { retStr += ::toupper(*it); } else if (::isupper(*it)) { retStr += '_'; retStr += *it; } else { retStr += toupper(*it); } } retStr += '_'; return retStr; } bool findValue(std::string_view findValue, std::string_view prefix, uint64_t *pValue, EnumValueSet const *start, EnumValueSet const *end) { // Remove the vendor tag suffix if it's on the value findValue = stripVendor(findValue); if (findValue[findValue.size() - 1] == '_') findValue = findValue.substr(0, findValue.size() - 1); // Remove '_BIT' if it's there findValue = stripBit(findValue); // Iterate until we find the value while (start != end) { if (findValue == start->name) { *pValue |= start->value; return true; } std::string prefixedName{prefix}; prefixedName += start->name; if (findValue == prefixedName) { *pValue |= start->value; return true; } ++start; } return false; } /** * @brief Takes a given string and formats it for use with parsing * @param str The string to format * @return Formatted string * * First, any non alphanumeric characters are trimmed from both ends of the string. * After than, any spaces are replaced with underscores, and finally all the characters are * capitalized. This will generate the string closest to the original ones found in the XML spec. 
*/ std::string formatString(std::string str) { // Trim left std::size_t cutOffset = 0; for (auto c : str) { if (::isalnum(c)) break; else ++cutOffset; } str = str.substr(cutOffset); // Trim right cutOffset = 0; for (std::size_t i = 0; i < str.size(); ++i) { if (::isalnum(str[i])) cutOffset = i + 1; } str = str.substr(0, cutOffset); std::replace(str.begin(), str.end(), ' ', '_'); std::for_each(str.begin(), str.end(), [](char &c) { c = ::toupper(c); }); return str; } bool serializeBitmask(EnumValueSet const *end, EnumValueSet const *start, bool allowEmpty, uint64_t vkValue, std::string *pString) { --end; --start; if(start == end) { // If this is a non-existing bitmask, then return an empty string *pString = {}; return true; } std::string retStr; while (start != end) { if(vkValue == 0 && !retStr.empty()) { break; } if ((start->value & vkValue) == start->value) { // Found a compatible bit mask, add it if (!retStr.empty()) { retStr += " | "; } retStr += start->name; vkValue = vkValue ^ start->value; } --start; } if (vkValue != 0 || (retStr.empty() && !allowEmpty)) { // Failed to find a valid bitmask for the value return false; } *pString = retStr; return true; } bool serializeEnum(EnumValueSet const *start, EnumValueSet const *end, uint64_t vkValue, std::string *pString) { while (start != end) { if (start->value == vkValue) { *pString = start->name; return true; } ++start; } return false; } bool parseBitmask(std::string_view vkString, EnumValueSet const *start, EnumValueSet const *end, std::string_view prefix, uint64_t *pValue) { uint64_t retVal = 0; auto startCh = vkString.begin(); auto endCh = startCh; for (; endCh != vkString.end(); ++endCh) { if (*endCh == '|') { std::string token(startCh, endCh); token = formatString(token); bool foundVal = findValue(token, prefix, &retVal, start, end); if (!foundVal) return false; startCh = endCh + 1; } } if (startCh != endCh) { std::string token(startCh, endCh); token = formatString(token); bool foundVal = findValue(token, 
prefix, &retVal, start, end); if (!foundVal) return false; } *pValue = retVal; return true; } bool parseEnum(std::string_view vkString, EnumValueSet const *start, EnumValueSet const *end, std::string_view prefix, uint64_t *pValue) { uint64_t retVal = 0; std::string token = formatString(std::string{vkString}); bool found = findValue(token, prefix, &retVal, start, end); if (found) { *pValue = retVal; } return found; } } // namespace bool vk_serialize(std::string_view vkType, uint64_t vkValue, std::string *pString) { if (vkType.empty()) { return false; } EnumValueSet const *start, *end; bool allowEmpty; if (!getEnumType(vkType, &start, &end, &allowEmpty)) { return false; } if (vkType.find("Flags") != std::string::npos || vkType.find("FlagBits") != std::string::npos) { return serializeBitmask(start, end, allowEmpty, vkValue, pString); } return serializeEnum(start, end, vkValue, pString); } bool vk_serialize(std::string_view vkType, uint32_t vkValue, std::string *pString) { return vk_serialize(vkType, static_cast<uint64_t>(vkValue), pString); } bool vk_parse(std::string_view vkType, std::string vkString, uint64_t *pValue) { if (vkType.empty()) { return false; } EnumValueSet const *start, *end; bool allowEmpty; if (!getEnumType(vkType, &start, &end, &allowEmpty)) { return false; } if (vkString.empty()) { if (allowEmpty) { *pValue = 0; return true; } else { return false; } } std::string prefix = processEnumPrefix(stripVendor(vkType)); if (vkType.find("Flags") != std::string::npos || vkType.find("FlagBits") != std::string::npos) { return parseBitmask(vkString, start, end, prefix, pValue); } return parseEnum(vkString, start, end, prefix, pValue); } bool vk_parse(std::string_view vkType, std::string vkString, uint32_t *pValue) { uint64_t tempValue; if (vk_parse(vkType, vkString, &tempValue)) { *pValue = static_cast<uint32_t>(tempValue); return true; } return false; } """) # endif outFile.write("\n#endif // VK_VALUE_SERIALIZATION_CONFIG_MAIN\n") outFile.write("#endif // 
VK_VALUE_SERIALIZATION_HPP\n") outFile.close() if __name__ == "__main__": main(sys.argv[1:])
#!/usr/bin/env python3 import sys import getopt import xml.etree.ElementTree as ET def processVendors(outFile, vendors): outFile.writelines(["\nconstexpr std::array<std::string_view, ", str( len(vendors)), "> vendors = {{\n"]) for vendor in vendors: outFile.writelines([' \"', vendor.tag, '\",\n']) outFile.write('}};\n') def processEnumValue(outFile, enum, value): if not value.get('value') is None: # Spitting out plain values outFile.write(value.get('value')) elif not value.get('bitpos') is None: # Bitflag outFile.writelines( ['0x', format(1 << int(value.get('bitpos')), '08X')]) elif not value.get('alias') is None: processEnumValue(outFile, enum, enum.find(value.get('alias'))) def processEnums(outFile, enums, vendors, first, last): for enum in enums: # Skip VkResult if enum.tag == 'VkResult': continue # Skip if there's no values, MSVC can't do zero-sized arrays if len(enum.findall('./')) == 0: continue outFile.writelines( ['\nconstexpr EnumValueSet ', enum.tag, 'Sets[] = {\n']) # Determine how much to chop off the front strName = enum.tag typeDigit = '' # Determine if type ends with vendor tag vendorName = '' for vendor in vendors: if strName.endswith(vendor.tag): vendorName = vendor.tag strName = strName[:-len(vendorName)] if strName[-1].isdigit(): typeDigit = strName[-1] strName = strName[:-1] if strName.endswith('FlagBits'): strName = strName[:-8] # Construct most likely enum prefix mainPrefix = '' for char in strName: if mainPrefix == '': mainPrefix += char elif char.isupper(): mainPrefix += '_' mainPrefix += char.upper() else: mainPrefix += char.upper() mainPrefix += '_' if typeDigit != '': mainPrefix += typeDigit mainPrefix += '_' current = first while current <= last: for value in enum.findall('./'): if int(value.get('first')) != current: continue outFile.write(" {\"") valueStr = value.tag if valueStr.startswith(mainPrefix): valueStr = valueStr[len(mainPrefix):] if vendorName != '' and valueStr.endswith(vendorName): valueStr = valueStr[:-len(vendorName)-1] if 
valueStr.endswith('_BIT'): valueStr = valueStr[:-4] outFile.write(valueStr) outFile.write("\", ") processEnumValue(outFile, enum, value) outFile.write("},\n") current += 1 outFile.write('};\n') def main(argv): inputFile = '' outputFile = '' try: opts, args = getopt.getopt(argv, 'i:o:', []) except getopt.GetoptError: print('Error parsing options') sys.exit(1) for opt, arg in opts: if opt == '-i': inputFile = arg elif opt == '-o': outputFile = arg if(inputFile == ''): print("Error: No Vulkan XML file specified") sys.exit(1) if(outputFile == ''): print("Error: No output file specified") sys.exit(1) try: dataXml = ET.parse(inputFile) dataRoot = dataXml.getroot() except: print("Error: Could not open input file: ", inputFile) sys.exit(1) firstVersion = int(dataRoot.get('first')) lastVersion = int(dataRoot.get('last')) outFile = open(outputFile, "w") # Common Header with open("common_header.txt") as fd: outFile.write(fd.read()) outFile.write('\n') # outFile.write("""#ifndef VK_VALUE_SERIALIZATION_HPP #define VK_VALUE_SERIALIZATION_HPP /* USAGE: To use, include this header where the declarations for the boolean checks are required. On *ONE* compilation unit, include the definition of `#define VK_VALUE_SERIALIZATION_CONFIG_MAIN` so that the definitions are compiled somewhere following the one definition rule. */ #include <vulkan/vulkan.h> #include <string> #include <string_view> """) # Static Asserts outFile.writelines(["\nstatic_assert(VK_HEADER_VERSION >= ", str( firstVersion), ", \"VK_HEADER_VERSION is from before the supported range.\");\n"]) outFile.writelines(["static_assert(VK_HEADER_VERSION <= ", str( lastVersion), ", \"VK_HEADER_VERSION is from after the supported range.\");\n"]) # Function Declarataions outFile.write(""" /** * @brief Macro that automatically stringifies the given Vulkan type for serialization * @param VKTYPE Actual Vulkan type * @param VALUE Value to be serialized * @param STRPTR Pointer to the string to store the serialization in. 
Only modified if true is * returned. * @return True if serialization was successful. False otherwise. */ #define VK_SERIALIZE(VKTYPE, VALUE, STRPTR) vk_serialize<VKTYPE>(#VKTYPE, VALUE, STRPTR) /** * @brief Macro that automatically stringifies the given Vulkan type for parsing * @param VKTYPE Actual Vulkan type * @param STRING String to be parsed * @param VALPTR Pointer to the value to store the parsed value in. Only modified if true is * returned. * @return True if serialization was successful. False otherwise. */ #define VK_PARSE(VKTYPE, STRING, VALPTR) vk_parse<VKTYPE>(#VKTYPE, STRING, VALPTR) /** * @brief Serializes a Vulkan enumerator/flag type (32-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkValue Value being serialized * @param pString Pointer to a string that will be modified with the serialized value. Only modified * if true is returned. * @return True the value was successfully serialized. False otherwise. */ bool vk_serialize(std::string_view vkType, uint32_t vkValue, std::string *pString); /** * @brief Parses a Vulkan enumerator/flag serialized string (32-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkString String being parsed * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if * true is returned. * @return True the value was successfully serialized. False otherwise. */ bool vk_parse(std::string_view vkType, std::string vkString, uint32_t *pValue); /** * @brief Serializes a Vulkan enumerator/flag type (64-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkValue Value being serialized * @param pString Pointer to a string that will be modified with the serialized value. Only modified * if true is returned. * @return True the value was successfully serialized. False otherwise. 
*/ bool vk_serialize(std::string_view vkType, uint64_t vkValue, std::string *pString); /** * @brief Parses a Vulkan enumerator/flag serialized string (64-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkString String being parsed * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if * true is returned. * @return True the value was successfully serialized. False otherwise. */ bool vk_parse(std::string_view vkType, std::string vkString, uint64_t *pValue); /** * @brief Serializes a Vulkan enumerator/flag type * @tparam Vulkan type being serialized * @param vkType Name of the Vulkan enumerator/flag type * @param vkValue Value being serialized * @param pString Pointer to a string that will be modified with the serialized value. Only modified * if true is returned. * @return True the value was successfully serialized. False otherwise. */ template <typename T> bool vk_serialize(std::string_view vkType, T vkValue, std::string *pString) { return vk_serialize(vkType, static_cast<uint32_t>(vkValue), pString); } /** * @brief Parses a Vulkan enumerator/flag serialized string * @tparam Vulkan type being parsed * @param vkType Name of the Vulkan enumerator/flag type * @param vkString String being parsed * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if * true is returned. * @return True the value was successfully serialized. False otherwise. 
*/ template <typename T> bool vk_parse(std::string_view vkType, std::string vkString, T *pValue) { uint32_t retVal = 0; auto found = vk_parse(vkType, vkString, &retVal); if (found) { *pValue = static_cast<T>(retVal); } return found; } """) # Definition Start outFile.write("\n#ifdef VK_VALUE_SERIALIZATION_CONFIG_MAIN\n") outFile.write("\n#include <algorithm>\n") outFile.write("#include <array>\n") outFile.write("#include <cstring>\n") outFile.write("\nnamespace {\n") # Vendors vendors = dataRoot.findall('vendors/') processVendors(outFile, vendors) # EnumSet Declaration outFile.write("\nstruct EnumValueSet {\n") outFile.write(" std::string_view name;\n") outFile.write(" int64_t value;\n") outFile.write("};\n") # Enums enums = dataRoot.findall('enums/') processEnums(outFile, enums, vendors, firstVersion, lastVersion) # Enum Type Declaration outFile.write("\nstruct EnumType {\n") outFile.write(" std::string_view name;\n") outFile.write(" EnumValueSet const* data;\n") outFile.write(" uint32_t count;\n") outFile.write(" bool allowEmpty;\n") outFile.write("};\n") # Enum Pointer Array outFile.writelines(["\nconstexpr std::array<EnumType, ", str( len(enums)-1), "> enumTypes = {{\n"]) # -1 for not doing VkResult for enum in enums: if enum.tag == 'VkResult': continue valueCount = len(enum.findall('./')) if valueCount == 0: outFile.writelines( [" {\"", str(enum.tag), "\", nullptr, 0, true},\n"]) else: allowEmpty = "true" for enumVal in enum.findall('./'): if enumVal.get('first') == enum.get('first'): allowEmpty = "false" outFile.writelines([" {\"", str(enum.tag), "\", ", str( enum.tag), "Sets, ", str(valueCount), ", ", allowEmpty, "},\n"]) outFile.write('}};\n') # Function definitions outFile.write(""" /** * @brief Removes a vendor tag from the end of the given string view * @param view String view to remove the vendor tag from * @return A string_view without the vendor tag, if it was suffixed */ std::string_view stripVendor(std::string_view view) { for (auto const &it : 
vendors) { // Don't strip if it's all that's left if (view == it) break; if (strncmp(view.data() + view.size() - it.size(), it.data(), it.size()) == 0) { view = view.substr(0, view.size() - it.size()); break; } } return view; } /** * @brief Strips '_BIT' from the end of a string, if there */ std::string_view stripBit(std::string_view view) { if (view.size() > strlen("_BIT")) { if (view.substr(view.size() - strlen("_BIT")) == "_BIT") { return view.substr(0, view.size() - strlen("_BIT")); } } return view; } bool getEnumType(std::string_view vkType, EnumValueSet const **ppStart, EnumValueSet const **ppEnd, bool *pAllowEmpty) { // Check for a conversion from Flags -> FlagBits std::string localString; if (vkType.rfind("Flags") != std::string::npos) { localString = vkType; auto it = localString.rfind("Flags"); localString = localString.replace(it, strlen("Flags"), "FlagBits"); vkType = localString; } // Try the original name for (auto const &it : enumTypes) { if (vkType == std::string_view{it.name}) { *ppStart = it.data; *ppEnd = it.data + it.count; *pAllowEmpty = it.allowEmpty; return true; } } // Try a vendor-stripped name vkType = stripVendor(vkType); for (auto const &it : enumTypes) { if (vkType == std::string_view{it.name}) { *ppStart = it.data; *ppEnd = it.data + it.count; *pAllowEmpty = it.allowEmpty; return true; } } return false; } /** * @brief Converts a Vulkan Flag typename into the prefix that is used for it's enums * @param typeName Name of the type to generate the Vk enum prefix for * @return Generated prefix string * * Any capitalized letters except for the first has an underscore inserted before it, an underscore * is added to the end, and all characters are converted to upper case. * * It also removed the 'Flags' or 'FlagBits' suffixes. 
*/ std::string processEnumPrefix(std::string_view typeName) { // Flag Bits std::size_t flagBitsSize = strlen("FlagBits"); if (typeName.size() > flagBitsSize) { if (strncmp(typeName.data() + typeName.size() - flagBitsSize, "FlagBits", flagBitsSize) == 0) { typeName = typeName.substr(0, typeName.size() - strlen("FlagBits")); } } // Flags std::size_t flagsSize = strlen("Flags"); if (typeName.size() > flagsSize) { if (strncmp(typeName.data() + typeName.size() - flagsSize, "Flags", flagsSize) == 0) { typeName = typeName.substr(0, typeName.size() - strlen("Flags")); } } std::string retStr; for (auto it = typeName.begin(); it != typeName.end(); ++it) { if (it == typeName.begin()) { retStr += ::toupper(*it); } else if (::isupper(*it)) { retStr += '_'; retStr += *it; } else { retStr += toupper(*it); } } retStr += '_'; return retStr; } bool findValue(std::string_view findValue, std::string_view prefix, uint64_t *pValue, EnumValueSet const *start, EnumValueSet const *end) { // Remove the vendor tag suffix if it's on the value findValue = stripVendor(findValue); if (findValue[findValue.size() - 1] == '_') findValue = findValue.substr(0, findValue.size() - 1); // Remove '_BIT' if it's there findValue = stripBit(findValue); // Iterate until we find the value while (start != end) { if (findValue == start->name) { *pValue |= start->value; return true; } std::string prefixedName{prefix}; prefixedName += start->name; if (findValue == prefixedName) { *pValue |= start->value; return true; } ++start; } return false; } /** * @brief Takes a given string and formats it for use with parsing * @param str The string to format * @return Formatted string * * First, any non alphanumeric characters are trimmed from both ends of the string. * After than, any spaces are replaced with underscores, and finally all the characters are * capitalized. This will generate the string closest to the original ones found in the XML spec. 
*/ std::string formatString(std::string str) { // Trim left std::size_t cutOffset = 0; for (auto c : str) { if (::isalnum(c)) break; else ++cutOffset; } str = str.substr(cutOffset); // Trim right cutOffset = 0; for (std::size_t i = 0; i < str.size(); ++i) { if (::isalnum(str[i])) cutOffset = i + 1; } str = str.substr(0, cutOffset); std::replace(str.begin(), str.end(), ' ', '_'); std::for_each(str.begin(), str.end(), [](char &c) { c = ::toupper(c); }); return str; } bool serializeBitmask(EnumValueSet const *end, EnumValueSet const *start, bool allowEmpty, uint64_t vkValue, std::string *pString) { --end; --start; if(start == end) { // If this is a non-existing bitmask, then return an empty string *pString = {}; return true; } std::string retStr; while (start != end) { if(vkValue == 0 && !retStr.empty()) { break; } if ((start->value & vkValue) == start->value) { // Found a compatible bit mask, add it if (!retStr.empty()) { retStr += " | "; } retStr += start->name; vkValue = vkValue ^ start->value; } --start; } if (vkValue != 0 || (retStr.empty() && !allowEmpty)) { // Failed to find a valid bitmask for the value return false; } *pString = retStr; return true; } bool serializeEnum(EnumValueSet const *start, EnumValueSet const *end, uint64_t vkValue, std::string *pString) { while (start != end) { if (start->value == vkValue) { *pString = start->name; return true; } ++start; } return false; } bool parseBitmask(std::string_view vkString, EnumValueSet const *start, EnumValueSet const *end, std::string_view prefix, uint64_t *pValue) { uint64_t retVal = 0; auto startCh = vkString.begin(); auto endCh = startCh; for (; endCh != vkString.end(); ++endCh) { if (*endCh == '|') { std::string token(startCh, endCh); token = formatString(token); bool foundVal = findValue(token, prefix, &retVal, start, end); if (!foundVal) return false; startCh = endCh + 1; } } if (startCh != endCh) { std::string token(startCh, endCh); token = formatString(token); bool foundVal = findValue(token, 
prefix, &retVal, start, end); if (!foundVal) return false; } *pValue = retVal; return true; } bool parseEnum(std::string_view vkString, EnumValueSet const *start, EnumValueSet const *end, std::string_view prefix, uint64_t *pValue) { uint64_t retVal = 0; std::string token = formatString(std::string{vkString}); bool found = findValue(token, prefix, &retVal, start, end); if (found) { *pValue = retVal; } return found; } } // namespace bool vk_serialize(std::string_view vkType, uint64_t vkValue, std::string *pString) { if (vkType.empty()) { return false; } EnumValueSet const *start, *end; bool allowEmpty; if (!getEnumType(vkType, &start, &end, &allowEmpty)) { return false; } if (vkType.find("Flags") != std::string::npos || vkType.find("FlagBits") != std::string::npos) { return serializeBitmask(start, end, allowEmpty, vkValue, pString); } return serializeEnum(start, end, vkValue, pString); } bool vk_serialize(std::string_view vkType, uint32_t vkValue, std::string *pString) { return vk_serialize(vkType, static_cast<uint64_t>(vkValue), pString); } bool vk_parse(std::string_view vkType, std::string vkString, uint64_t *pValue) { if (vkType.empty()) { return false; } EnumValueSet const *start, *end; bool allowEmpty; if (!getEnumType(vkType, &start, &end, &allowEmpty)) { return false; } if (vkString.empty()) { if (allowEmpty) { *pValue = 0; return true; } else { return false; } } std::string prefix = processEnumPrefix(stripVendor(vkType)); if (vkType.find("Flags") != std::string::npos || vkType.find("FlagBits") != std::string::npos) { return parseBitmask(vkString, start, end, prefix, pValue); } return parseEnum(vkString, start, end, prefix, pValue); } bool vk_parse(std::string_view vkType, std::string vkString, uint32_t *pValue) { uint64_t tempValue; if (vk_parse(vkType, vkString, &tempValue)) { *pValue = static_cast<uint32_t>(tempValue); return true; } return false; } """) # endif outFile.write("\n#endif // VK_VALUE_SERIALIZATION_CONFIG_MAIN\n") outFile.write("#endif // 
VK_VALUE_SERIALIZATION_HPP\n") outFile.close() if __name__ == "__main__": main(sys.argv[1:])
en
0.324926
#!/usr/bin/env python3 # Spitting out plain values # Bitflag # Skip VkResult # Skip if there's no values, MSVC can't do zero-sized arrays # Determine how much to chop off the front # Determine if type ends with vendor tag # Construct most likely enum prefix # Common Header # #ifndef VK_VALUE_SERIALIZATION_HPP #define VK_VALUE_SERIALIZATION_HPP /* USAGE: To use, include this header where the declarations for the boolean checks are required. On *ONE* compilation unit, include the definition of `#define VK_VALUE_SERIALIZATION_CONFIG_MAIN` so that the definitions are compiled somewhere following the one definition rule. */ #include <vulkan/vulkan.h> #include <string> #include <string_view> # Static Asserts # Function Declarataions /** * @brief Macro that automatically stringifies the given Vulkan type for serialization * @param VKTYPE Actual Vulkan type * @param VALUE Value to be serialized * @param STRPTR Pointer to the string to store the serialization in. Only modified if true is * returned. * @return True if serialization was successful. False otherwise. */ #define VK_SERIALIZE(VKTYPE, VALUE, STRPTR) vk_serialize<VKTYPE>(#VKTYPE, VALUE, STRPTR) /** * @brief Macro that automatically stringifies the given Vulkan type for parsing * @param VKTYPE Actual Vulkan type * @param STRING String to be parsed * @param VALPTR Pointer to the value to store the parsed value in. Only modified if true is * returned. * @return True if serialization was successful. False otherwise. */ #define VK_PARSE(VKTYPE, STRING, VALPTR) vk_parse<VKTYPE>(#VKTYPE, STRING, VALPTR) /** * @brief Serializes a Vulkan enumerator/flag type (32-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkValue Value being serialized * @param pString Pointer to a string that will be modified with the serialized value. Only modified * if true is returned. * @return True the value was successfully serialized. False otherwise. 
*/ bool vk_serialize(std::string_view vkType, uint32_t vkValue, std::string *pString); /** * @brief Parses a Vulkan enumerator/flag serialized string (32-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkString String being parsed * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if * true is returned. * @return True the value was successfully serialized. False otherwise. */ bool vk_parse(std::string_view vkType, std::string vkString, uint32_t *pValue); /** * @brief Serializes a Vulkan enumerator/flag type (64-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkValue Value being serialized * @param pString Pointer to a string that will be modified with the serialized value. Only modified * if true is returned. * @return True the value was successfully serialized. False otherwise. */ bool vk_serialize(std::string_view vkType, uint64_t vkValue, std::string *pString); /** * @brief Parses a Vulkan enumerator/flag serialized string (64-bit) * @param vkType Name of the Vulkan enumerator/flag type * @param vkString String being parsed * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if * true is returned. * @return True the value was successfully serialized. False otherwise. */ bool vk_parse(std::string_view vkType, std::string vkString, uint64_t *pValue); /** * @brief Serializes a Vulkan enumerator/flag type * @tparam Vulkan type being serialized * @param vkType Name of the Vulkan enumerator/flag type * @param vkValue Value being serialized * @param pString Pointer to a string that will be modified with the serialized value. Only modified * if true is returned. * @return True the value was successfully serialized. False otherwise. 
*/ template <typename T> bool vk_serialize(std::string_view vkType, T vkValue, std::string *pString) { return vk_serialize(vkType, static_cast<uint32_t>(vkValue), pString); } /** * @brief Parses a Vulkan enumerator/flag serialized string * @tparam Vulkan type being parsed * @param vkType Name of the Vulkan enumerator/flag type * @param vkString String being parsed * @param pValue Pointer to a value that will be modified with the parsed value. Only modified if * true is returned. * @return True the value was successfully serialized. False otherwise. */ template <typename T> bool vk_parse(std::string_view vkType, std::string vkString, T *pValue) { uint32_t retVal = 0; auto found = vk_parse(vkType, vkString, &retVal); if (found) { *pValue = static_cast<T>(retVal); } return found; } # Definition Start #ifdef VK_VALUE_SERIALIZATION_CONFIG_MAIN\n") #include <algorithm>\n") # Vendors # EnumSet Declaration # Enums # Enum Type Declaration # Enum Pointer Array # -1 for not doing VkResult # Function definitions /** * @brief Removes a vendor tag from the end of the given string view * @param view String view to remove the vendor tag from * @return A string_view without the vendor tag, if it was suffixed */ std::string_view stripVendor(std::string_view view) { for (auto const &it : vendors) { // Don't strip if it's all that's left if (view == it) break; if (strncmp(view.data() + view.size() - it.size(), it.data(), it.size()) == 0) { view = view.substr(0, view.size() - it.size()); break; } } return view; } /** * @brief Strips '_BIT' from the end of a string, if there */ std::string_view stripBit(std::string_view view) { if (view.size() > strlen("_BIT")) { if (view.substr(view.size() - strlen("_BIT")) == "_BIT") { return view.substr(0, view.size() - strlen("_BIT")); } } return view; } bool getEnumType(std::string_view vkType, EnumValueSet const **ppStart, EnumValueSet const **ppEnd, bool *pAllowEmpty) { // Check for a conversion from Flags -> FlagBits std::string localString; if 
(vkType.rfind("Flags") != std::string::npos) { localString = vkType; auto it = localString.rfind("Flags"); localString = localString.replace(it, strlen("Flags"), "FlagBits"); vkType = localString; } // Try the original name for (auto const &it : enumTypes) { if (vkType == std::string_view{it.name}) { *ppStart = it.data; *ppEnd = it.data + it.count; *pAllowEmpty = it.allowEmpty; return true; } } // Try a vendor-stripped name vkType = stripVendor(vkType); for (auto const &it : enumTypes) { if (vkType == std::string_view{it.name}) { *ppStart = it.data; *ppEnd = it.data + it.count; *pAllowEmpty = it.allowEmpty; return true; } } return false; } /** * @brief Converts a Vulkan Flag typename into the prefix that is used for it's enums * @param typeName Name of the type to generate the Vk enum prefix for * @return Generated prefix string * * Any capitalized letters except for the first has an underscore inserted before it, an underscore * is added to the end, and all characters are converted to upper case. * * It also removed the 'Flags' or 'FlagBits' suffixes. 
*/ std::string processEnumPrefix(std::string_view typeName) { // Flag Bits std::size_t flagBitsSize = strlen("FlagBits"); if (typeName.size() > flagBitsSize) { if (strncmp(typeName.data() + typeName.size() - flagBitsSize, "FlagBits", flagBitsSize) == 0) { typeName = typeName.substr(0, typeName.size() - strlen("FlagBits")); } } // Flags std::size_t flagsSize = strlen("Flags"); if (typeName.size() > flagsSize) { if (strncmp(typeName.data() + typeName.size() - flagsSize, "Flags", flagsSize) == 0) { typeName = typeName.substr(0, typeName.size() - strlen("Flags")); } } std::string retStr; for (auto it = typeName.begin(); it != typeName.end(); ++it) { if (it == typeName.begin()) { retStr += ::toupper(*it); } else if (::isupper(*it)) { retStr += '_'; retStr += *it; } else { retStr += toupper(*it); } } retStr += '_'; return retStr; } bool findValue(std::string_view findValue, std::string_view prefix, uint64_t *pValue, EnumValueSet const *start, EnumValueSet const *end) { // Remove the vendor tag suffix if it's on the value findValue = stripVendor(findValue); if (findValue[findValue.size() - 1] == '_') findValue = findValue.substr(0, findValue.size() - 1); // Remove '_BIT' if it's there findValue = stripBit(findValue); // Iterate until we find the value while (start != end) { if (findValue == start->name) { *pValue |= start->value; return true; } std::string prefixedName{prefix}; prefixedName += start->name; if (findValue == prefixedName) { *pValue |= start->value; return true; } ++start; } return false; } /** * @brief Takes a given string and formats it for use with parsing * @param str The string to format * @return Formatted string * * First, any non alphanumeric characters are trimmed from both ends of the string. * After than, any spaces are replaced with underscores, and finally all the characters are * capitalized. This will generate the string closest to the original ones found in the XML spec. 
*/ std::string formatString(std::string str) { // Trim left std::size_t cutOffset = 0; for (auto c : str) { if (::isalnum(c)) break; else ++cutOffset; } str = str.substr(cutOffset); // Trim right cutOffset = 0; for (std::size_t i = 0; i < str.size(); ++i) { if (::isalnum(str[i])) cutOffset = i + 1; } str = str.substr(0, cutOffset); std::replace(str.begin(), str.end(), ' ', '_'); std::for_each(str.begin(), str.end(), [](char &c) { c = ::toupper(c); }); return str; } bool serializeBitmask(EnumValueSet const *end, EnumValueSet const *start, bool allowEmpty, uint64_t vkValue, std::string *pString) { --end; --start; if(start == end) { // If this is a non-existing bitmask, then return an empty string *pString = {}; return true; } std::string retStr; while (start != end) { if(vkValue == 0 && !retStr.empty()) { break; } if ((start->value & vkValue) == start->value) { // Found a compatible bit mask, add it if (!retStr.empty()) { retStr += " | "; } retStr += start->name; vkValue = vkValue ^ start->value; } --start; } if (vkValue != 0 || (retStr.empty() && !allowEmpty)) { // Failed to find a valid bitmask for the value return false; } *pString = retStr; return true; } bool serializeEnum(EnumValueSet const *start, EnumValueSet const *end, uint64_t vkValue, std::string *pString) { while (start != end) { if (start->value == vkValue) { *pString = start->name; return true; } ++start; } return false; } bool parseBitmask(std::string_view vkString, EnumValueSet const *start, EnumValueSet const *end, std::string_view prefix, uint64_t *pValue) { uint64_t retVal = 0; auto startCh = vkString.begin(); auto endCh = startCh; for (; endCh != vkString.end(); ++endCh) { if (*endCh == '|') { std::string token(startCh, endCh); token = formatString(token); bool foundVal = findValue(token, prefix, &retVal, start, end); if (!foundVal) return false; startCh = endCh + 1; } } if (startCh != endCh) { std::string token(startCh, endCh); token = formatString(token); bool foundVal = findValue(token, 
prefix, &retVal, start, end); if (!foundVal) return false; } *pValue = retVal; return true; } bool parseEnum(std::string_view vkString, EnumValueSet const *start, EnumValueSet const *end, std::string_view prefix, uint64_t *pValue) { uint64_t retVal = 0; std::string token = formatString(std::string{vkString}); bool found = findValue(token, prefix, &retVal, start, end); if (found) { *pValue = retVal; } return found; } } // namespace bool vk_serialize(std::string_view vkType, uint64_t vkValue, std::string *pString) { if (vkType.empty()) { return false; } EnumValueSet const *start, *end; bool allowEmpty; if (!getEnumType(vkType, &start, &end, &allowEmpty)) { return false; } if (vkType.find("Flags") != std::string::npos || vkType.find("FlagBits") != std::string::npos) { return serializeBitmask(start, end, allowEmpty, vkValue, pString); } return serializeEnum(start, end, vkValue, pString); } bool vk_serialize(std::string_view vkType, uint32_t vkValue, std::string *pString) { return vk_serialize(vkType, static_cast<uint64_t>(vkValue), pString); } bool vk_parse(std::string_view vkType, std::string vkString, uint64_t *pValue) { if (vkType.empty()) { return false; } EnumValueSet const *start, *end; bool allowEmpty; if (!getEnumType(vkType, &start, &end, &allowEmpty)) { return false; } if (vkString.empty()) { if (allowEmpty) { *pValue = 0; return true; } else { return false; } } std::string prefix = processEnumPrefix(stripVendor(vkType)); if (vkType.find("Flags") != std::string::npos || vkType.find("FlagBits") != std::string::npos) { return parseBitmask(vkString, start, end, prefix, pValue); } return parseEnum(vkString, start, end, prefix, pValue); } bool vk_parse(std::string_view vkType, std::string vkString, uint32_t *pValue) { uint64_t tempValue; if (vk_parse(vkType, vkString, &tempValue)) { *pValue = static_cast<uint32_t>(tempValue); return true; } return false; } # endif #endif // VK_VALUE_SERIALIZATION_CONFIG_MAIN\n")
2.413119
2
ampel/cli/AbsStockCommand.py
AmpelProject/Ampel-core
5
9861
#!/usr/bin/env python # -*- coding: utf-8 -*- # File : Ampel-core/ampel/cli/AbsStockCommand.py # License : BSD-3-Clause # Author : vb <<EMAIL>> # Date : 25.03.2021 # Last Modified Date: 25.03.2021 # Last Modified By : vb <<EMAIL>> from typing import Dict, Any, Optional, Union, Literal from ampel.cli.ArgParserBuilder import ArgParserBuilder from ampel.cli.MaybeIntAction import MaybeIntAction from ampel.cli.LoadJSONAction import LoadJSONAction from ampel.cli.AbsCoreCommand import AbsCoreCommand from ampel.mongo.utils import maybe_match_array from ampel.model.UnitModel import UnitModel from ampel.model.time.UnixTimeModel import UnixTimeModel from ampel.model.time.TimeStringModel import TimeStringModel from ampel.model.time.TimeLastRunModel import TimeLastRunModel from ampel.model.time.TimeDeltaModel import TimeDeltaModel from ampel.model.time.TimeConstraintModel import TimeConstraintModel class AbsStockCommand(AbsCoreCommand, abstract=True): """ Base class for commands selecting/matching stock(s) """ @staticmethod def get_select_args_help() -> Dict[str, str]: return { # Required 'config': 'Path to an ampel config file (yaml/json)', # Optional 'secrets': 'Path to a YAML secrets store in sops format', 'log-profile': 'One of: default, compact, headerless, verbose, debug', 'id-mapper': 'Convert stock ids using the provided id mapper (ex: ZTFIdMapper)', # Selection 'stock': 'Stock id(s) (OR matched if multi-valued)', 'channel': 'Channel(s)', 'created-after-ts': 'Created after unix timestamp', 'created-after-str': 'Created after date-time iso string', 'created-after-delta': 'Created after time delta', 'created-after-process': 'Created after last run of process with name', 'created-before-ts': 'Created before unix timestamp', 'created-before-str': 'Created before date-time iso string', 'created-before-delta': 'Created before time delta', 'created-before-process': 'Created before last run of process with name', 'updated-after-ts': 'Updated after unix timestamp', 
'updated-after-str': 'Updated after date-time iso string', 'updated-after-delta': 'Updated after time delta', 'updated-after-process': 'Updated after last run of process with name', 'updated-before-ts': 'Updated before unix timestamp', 'updated-before-str': 'Updated before date-time iso string', 'updated-before-delta': 'Updated before time delta', 'updated-before-process': 'Updated before last run of process with name', 'custom-match': 'Custom mongodb match as JSON string (ex: {"body.aKey": {"$gt": 1}})', } def add_selection_args(self, builder: ArgParserBuilder) -> None: # Selection args builder.add_group('match', 'Stock selection arguments') builder.add_arg('match', "stock", action=MaybeIntAction, nargs="+") builder.add_x_args('match', {'name': 'created-before-str'}, {'name': 'created-before-ts', 'type': int}, {'name': 'created-before-delta', 'action': LoadJSONAction}, {'name': 'created-before-process'} ) builder.add_x_args('match', {'name': 'created-after-str'}, {'name': 'created-after-ts', 'type': int}, {'name': 'created-after-delta', 'action': LoadJSONAction}, {'name': 'created-after-process'} ) builder.add_x_args('match', {'name': 'updated-before-str'}, {'name': 'updated-before-ts', 'type': int}, {'name': 'updated-before-delta', 'action': LoadJSONAction}, {'name': 'updated-before-process'} ) builder.add_x_args('match', {'name': 'updated-after-str'}, {'name': 'updated-after-ts', 'type': int}, {'name': 'updated-after-delta', 'action': LoadJSONAction}, {'name': 'updated-after-process'} ) builder.create_logic_args('match', "channel", "Channel") builder.create_logic_args('match', "with-tag", "Tag") builder.create_logic_args('match', "without-tag", "Tag", excl=True) builder.add_arg('match', "custom-match", metavar="#", action=LoadJSONAction) def get_tag(self, args: Dict[str, Any]) -> Optional[Dict[Union[Literal['with'], Literal['without']], Dict]]: tag: Optional[Dict[Union[Literal['with'], Literal['without']], Dict]] = None if args.get('with_tag'): tag = {'with': 
args['with_tag']} if args.get('without_tag'): if tag is None: tag = {} tag['without'] = args['without_tag'] return tag def build_select_model(self, args: Dict[str, Any]) -> UnitModel: conf = { "created": self.get_time_model("created", args), "updated": self.get_time_model("updated", args), 'channel': args['channel'], 'custom': args['custom_match'] } if args.get('tag'): conf['tag'] = self.get_tag(args) if (stock := args.get('stock')): conf['custom'] = { '_id': stock if isinstance(stock, (int, bytes, str)) else maybe_match_array(stock) } return UnitModel(unit="T3StockSelector", config=conf) def get_time_model(self, prefix: str, args: Dict[str, Any]) -> TimeConstraintModel: d: Dict[str, Any] = {'after': None, 'before': None} for when in ('after', 'before'): if args.get(x := f"{prefix}_{when}_ts"): d[when] = UnixTimeModel(match_type='unix_time', value=args[x]) elif args.get(x := f"{prefix}_{when}_str"): d[when] = TimeStringModel(match_type='time_string', dateTimeStr=args[x], dateTimeFormat="%Y%m%dT%H%M%S") elif args.get(x := f"{prefix}_{when}_delta"): d[when] = TimeDeltaModel(match_type='time_delta', **args[x]) elif args.get(x := f"{prefix}_{when}_process"): d[when] = TimeLastRunModel(match_type='time_last_run', process_name=args[x]) return TimeConstraintModel(**d)
#!/usr/bin/env python # -*- coding: utf-8 -*- # File : Ampel-core/ampel/cli/AbsStockCommand.py # License : BSD-3-Clause # Author : vb <<EMAIL>> # Date : 25.03.2021 # Last Modified Date: 25.03.2021 # Last Modified By : vb <<EMAIL>> from typing import Dict, Any, Optional, Union, Literal from ampel.cli.ArgParserBuilder import ArgParserBuilder from ampel.cli.MaybeIntAction import MaybeIntAction from ampel.cli.LoadJSONAction import LoadJSONAction from ampel.cli.AbsCoreCommand import AbsCoreCommand from ampel.mongo.utils import maybe_match_array from ampel.model.UnitModel import UnitModel from ampel.model.time.UnixTimeModel import UnixTimeModel from ampel.model.time.TimeStringModel import TimeStringModel from ampel.model.time.TimeLastRunModel import TimeLastRunModel from ampel.model.time.TimeDeltaModel import TimeDeltaModel from ampel.model.time.TimeConstraintModel import TimeConstraintModel class AbsStockCommand(AbsCoreCommand, abstract=True): """ Base class for commands selecting/matching stock(s) """ @staticmethod def get_select_args_help() -> Dict[str, str]: return { # Required 'config': 'Path to an ampel config file (yaml/json)', # Optional 'secrets': 'Path to a YAML secrets store in sops format', 'log-profile': 'One of: default, compact, headerless, verbose, debug', 'id-mapper': 'Convert stock ids using the provided id mapper (ex: ZTFIdMapper)', # Selection 'stock': 'Stock id(s) (OR matched if multi-valued)', 'channel': 'Channel(s)', 'created-after-ts': 'Created after unix timestamp', 'created-after-str': 'Created after date-time iso string', 'created-after-delta': 'Created after time delta', 'created-after-process': 'Created after last run of process with name', 'created-before-ts': 'Created before unix timestamp', 'created-before-str': 'Created before date-time iso string', 'created-before-delta': 'Created before time delta', 'created-before-process': 'Created before last run of process with name', 'updated-after-ts': 'Updated after unix timestamp', 
'updated-after-str': 'Updated after date-time iso string', 'updated-after-delta': 'Updated after time delta', 'updated-after-process': 'Updated after last run of process with name', 'updated-before-ts': 'Updated before unix timestamp', 'updated-before-str': 'Updated before date-time iso string', 'updated-before-delta': 'Updated before time delta', 'updated-before-process': 'Updated before last run of process with name', 'custom-match': 'Custom mongodb match as JSON string (ex: {"body.aKey": {"$gt": 1}})', } def add_selection_args(self, builder: ArgParserBuilder) -> None: # Selection args builder.add_group('match', 'Stock selection arguments') builder.add_arg('match', "stock", action=MaybeIntAction, nargs="+") builder.add_x_args('match', {'name': 'created-before-str'}, {'name': 'created-before-ts', 'type': int}, {'name': 'created-before-delta', 'action': LoadJSONAction}, {'name': 'created-before-process'} ) builder.add_x_args('match', {'name': 'created-after-str'}, {'name': 'created-after-ts', 'type': int}, {'name': 'created-after-delta', 'action': LoadJSONAction}, {'name': 'created-after-process'} ) builder.add_x_args('match', {'name': 'updated-before-str'}, {'name': 'updated-before-ts', 'type': int}, {'name': 'updated-before-delta', 'action': LoadJSONAction}, {'name': 'updated-before-process'} ) builder.add_x_args('match', {'name': 'updated-after-str'}, {'name': 'updated-after-ts', 'type': int}, {'name': 'updated-after-delta', 'action': LoadJSONAction}, {'name': 'updated-after-process'} ) builder.create_logic_args('match', "channel", "Channel") builder.create_logic_args('match', "with-tag", "Tag") builder.create_logic_args('match', "without-tag", "Tag", excl=True) builder.add_arg('match', "custom-match", metavar="#", action=LoadJSONAction) def get_tag(self, args: Dict[str, Any]) -> Optional[Dict[Union[Literal['with'], Literal['without']], Dict]]: tag: Optional[Dict[Union[Literal['with'], Literal['without']], Dict]] = None if args.get('with_tag'): tag = {'with': 
args['with_tag']} if args.get('without_tag'): if tag is None: tag = {} tag['without'] = args['without_tag'] return tag def build_select_model(self, args: Dict[str, Any]) -> UnitModel: conf = { "created": self.get_time_model("created", args), "updated": self.get_time_model("updated", args), 'channel': args['channel'], 'custom': args['custom_match'] } if args.get('tag'): conf['tag'] = self.get_tag(args) if (stock := args.get('stock')): conf['custom'] = { '_id': stock if isinstance(stock, (int, bytes, str)) else maybe_match_array(stock) } return UnitModel(unit="T3StockSelector", config=conf) def get_time_model(self, prefix: str, args: Dict[str, Any]) -> TimeConstraintModel: d: Dict[str, Any] = {'after': None, 'before': None} for when in ('after', 'before'): if args.get(x := f"{prefix}_{when}_ts"): d[when] = UnixTimeModel(match_type='unix_time', value=args[x]) elif args.get(x := f"{prefix}_{when}_str"): d[when] = TimeStringModel(match_type='time_string', dateTimeStr=args[x], dateTimeFormat="%Y%m%dT%H%M%S") elif args.get(x := f"{prefix}_{when}_delta"): d[when] = TimeDeltaModel(match_type='time_delta', **args[x]) elif args.get(x := f"{prefix}_{when}_process"): d[when] = TimeLastRunModel(match_type='time_last_run', process_name=args[x]) return TimeConstraintModel(**d)
en
0.36907
#!/usr/bin/env python # -*- coding: utf-8 -*- # File : Ampel-core/ampel/cli/AbsStockCommand.py # License : BSD-3-Clause # Author : vb <<EMAIL>> # Date : 25.03.2021 # Last Modified Date: 25.03.2021 # Last Modified By : vb <<EMAIL>> Base class for commands selecting/matching stock(s) # Required # Optional # Selection # Selection args
1.95767
2
programmers/lv2/42888.py
KLumy/Basic-Algorithm
1
9862
from typing import List def solution(records: List[str]): logger = [] id_name = dict() message = {"Enter": "님이 들어왔습니다.", "Leave": "님이 나갔습니다."} for record in records: op, id, *name = record.split() if name: id_name[id] = name[0] if op in message: logger.append((id, op)) answer = [] for log in logger: id, msg = log answer.append(id_name[id] + message[msg]) return answer if __name__ == "__main__": i = [ "Enter uid1234 Muzi", "Enter uid4567 Prodo", "Leave uid1234", "Enter uid1234 Prodo", "Change uid4567 Ryan", ] print(solution(i))
from typing import List def solution(records: List[str]): logger = [] id_name = dict() message = {"Enter": "님이 들어왔습니다.", "Leave": "님이 나갔습니다."} for record in records: op, id, *name = record.split() if name: id_name[id] = name[0] if op in message: logger.append((id, op)) answer = [] for log in logger: id, msg = log answer.append(id_name[id] + message[msg]) return answer if __name__ == "__main__": i = [ "Enter uid1234 Muzi", "Enter uid4567 Prodo", "Leave uid1234", "Enter uid1234 Prodo", "Change uid4567 Ryan", ] print(solution(i))
none
1
3.420058
3
app/nets.py
bobosoft/intrepyd
2
9863
<reponame>bobosoft/intrepyd """ Implementation of REST API for nets creation """ from flask import Blueprint, request from .utils import typename_to_type from .contexts import contexts nr = Blueprint('nets', __name__) def _create_bool_constant(func): context = request.get_json()['context'] if context is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] net = func(ctx) return {'result': ctx.net2name[net]}, 201 def _create_unary_gate(func): context = request.get_json()['context'] x = request.get_json()['x'] if context is None or x is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] x = ctx.nets[x] assert x is not None net = func(ctx, x) return {'result': ctx.net2name[net]}, 201 def _create_binary_gate(func): context = request.get_json()['context'] x = request.get_json()['x'] y = request.get_json()['y'] if context is None or x is None or y is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] x = ctx.nets[x] y = ctx.nets[y] assert x is not None assert y is not None net = func(ctx, x, y) return {'result': ctx.net2name[net]}, 201 @nr.route('', methods=['GET']) def list_nets(): """ Gets the list of the available nets """ context = request.args.get('context') ctx = contexts[context]['context'] return {'nets': [key for key, _ in ctx.nets.items()]}, 200 @nr.route('/true', methods=['POST']) def create_true(): """ Creates the net true """ return _create_bool_constant(lambda ctx : ctx.mk_true()) @nr.route('/false', methods=['POST']) def create_false(): """ Creates the net false """ return _create_bool_constant(lambda ctx : ctx.mk_false()) @nr.route('/numbers/create', methods=['POST']) def create_number(): """ Creates a number """ context = request.get_json()['context'] value = request.get_json()['value'] typ = request.get_json()['type'] if context is None or value is None or typ is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] assert value is not None assert typ is not None net = 
ctx.mk_number(value, typename_to_type(ctx, typ)) return {'result': ctx.net2name[net]}, 201 @nr.route('/nots/create', methods=['POST']) def create_not(): """ Creates a logical not """ return _create_unary_gate(lambda ctx, x : ctx.mk_not(x)) @nr.route('/minuses/create', methods=['POST']) def create_minus(): """ Creates an arithmetic minus """ return _create_unary_gate(lambda ctx, x : ctx.mk_minus(x)) @nr.route('/ands/create', methods=['POST']) def create_and(): """ Creates a logical and """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_and(x, y)) @nr.route('/ors/create', methods=['POST']) def create_or(): """ Creates a logical or """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_or(x, y)) @nr.route('/implieses/create', methods=['POST']) def create_implies(): """ Creates a logical implies """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_implies(x, y)) @nr.route('/xors/create', methods=['POST']) def create_xor(): """ Creates a logical xor """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_xor(x, y)) @nr.route('/iffs/create', methods=['POST']) def create_iff(): """ Creates a logical iff """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_iff(x, y)) @nr.route('/adds/create', methods=['POST']) def create_add(): """ Creates an addition """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_add(x, y)) @nr.route('/muls/create', methods=['POST']) def create_mul(): """ Creates a multiplication """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_mul(x, y)) @nr.route('/divs/create', methods=['POST']) def create_div(): """ Creates a division """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_div(x, y)) @nr.route('/mods/create', methods=['POST']) def create_mod(): """ Creates a modulus """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_mod(x, y)) @nr.route('/subs/create', methods=['POST']) def create_sub(): """ Creates a subtraction """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_sub(x, y)) @nr.route('/eqs/create', 
methods=['POST']) def create_eq(): """ Creates an equality """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_eq(x, y)) @nr.route('/leqs/create', methods=['POST']) def create_leq(): """ Creates an less or equal """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_leq(x, y)) @nr.route('/geqs/create', methods=['POST']) def create_geq(): """ Creates a greater or equal """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_geq(x, y)) @nr.route('/lts/create', methods=['POST']) def create_lt(): """ Creates a less than """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_lt(x, y)) @nr.route('/gts/create', methods=['POST']) def create_gt(): """ Creates a greater than """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_gt(x, y)) @nr.route('/neqs/create', methods=['POST']) def create_neq(): """ Creates a not equal """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_neq(x, y)) @nr.route('/ites/create', methods=['POST']) def create_ite(): """ Creates an if then else """ context = request.get_json()['context'] x = request.get_json()['x'] y = request.get_json()['y'] z = request.get_json()['z'] if context is None or x is None or y is None or z is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] i = ctx.nets[x] t = ctx.nets[y] e = ctx.nets[z] assert i is not None assert t is not None assert e is not None net = ctx.mk_ite(i, t, e) return {'result': ctx.net2name[net]}, 201 @nr.route('/casts/create', methods=['POST']) def create_cast(): """ Creates a type cast """ context = request.get_json()['context'] x = request.get_json()['x'] t = request.get_json()['type'] if context is None or x is None or t is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] x = ctx.nets[x] assert ctx is not None assert x is not None net = None if t == 'int8': net = ctx.mk_cast_to_int8(x) elif t == 'int16': net = ctx.mk_cast_to_int16(x) elif t == 'int32': net = ctx.mk_cast_to_int32(x) elif t == 'int64': net = ctx.mk_cast_to_int64(x) elif 
t == 'uint8': net = ctx.mk_cast_to_uint8(x) elif t == 'uint16': net = ctx.mk_cast_to_uint16(x) elif t == 'uint32': net = ctx.mk_cast_to_uint32(x) elif t == 'uint64': net = ctx.mk_cast_to_uint64(x) else: return {'result': 'unhandled type {}'.format(t)}, 400 assert net is not None return {'result': ctx.net2name[net]}, 201
""" Implementation of REST API for nets creation """ from flask import Blueprint, request from .utils import typename_to_type from .contexts import contexts nr = Blueprint('nets', __name__) def _create_bool_constant(func): context = request.get_json()['context'] if context is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] net = func(ctx) return {'result': ctx.net2name[net]}, 201 def _create_unary_gate(func): context = request.get_json()['context'] x = request.get_json()['x'] if context is None or x is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] x = ctx.nets[x] assert x is not None net = func(ctx, x) return {'result': ctx.net2name[net]}, 201 def _create_binary_gate(func): context = request.get_json()['context'] x = request.get_json()['x'] y = request.get_json()['y'] if context is None or x is None or y is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] x = ctx.nets[x] y = ctx.nets[y] assert x is not None assert y is not None net = func(ctx, x, y) return {'result': ctx.net2name[net]}, 201 @nr.route('', methods=['GET']) def list_nets(): """ Gets the list of the available nets """ context = request.args.get('context') ctx = contexts[context]['context'] return {'nets': [key for key, _ in ctx.nets.items()]}, 200 @nr.route('/true', methods=['POST']) def create_true(): """ Creates the net true """ return _create_bool_constant(lambda ctx : ctx.mk_true()) @nr.route('/false', methods=['POST']) def create_false(): """ Creates the net false """ return _create_bool_constant(lambda ctx : ctx.mk_false()) @nr.route('/numbers/create', methods=['POST']) def create_number(): """ Creates a number """ context = request.get_json()['context'] value = request.get_json()['value'] typ = request.get_json()['type'] if context is None or value is None or typ is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] assert value is not None assert typ is not None net = ctx.mk_number(value, 
typename_to_type(ctx, typ)) return {'result': ctx.net2name[net]}, 201 @nr.route('/nots/create', methods=['POST']) def create_not(): """ Creates a logical not """ return _create_unary_gate(lambda ctx, x : ctx.mk_not(x)) @nr.route('/minuses/create', methods=['POST']) def create_minus(): """ Creates an arithmetic minus """ return _create_unary_gate(lambda ctx, x : ctx.mk_minus(x)) @nr.route('/ands/create', methods=['POST']) def create_and(): """ Creates a logical and """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_and(x, y)) @nr.route('/ors/create', methods=['POST']) def create_or(): """ Creates a logical or """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_or(x, y)) @nr.route('/implieses/create', methods=['POST']) def create_implies(): """ Creates a logical implies """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_implies(x, y)) @nr.route('/xors/create', methods=['POST']) def create_xor(): """ Creates a logical xor """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_xor(x, y)) @nr.route('/iffs/create', methods=['POST']) def create_iff(): """ Creates a logical iff """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_iff(x, y)) @nr.route('/adds/create', methods=['POST']) def create_add(): """ Creates an addition """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_add(x, y)) @nr.route('/muls/create', methods=['POST']) def create_mul(): """ Creates a multiplication """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_mul(x, y)) @nr.route('/divs/create', methods=['POST']) def create_div(): """ Creates a division """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_div(x, y)) @nr.route('/mods/create', methods=['POST']) def create_mod(): """ Creates a modulus """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_mod(x, y)) @nr.route('/subs/create', methods=['POST']) def create_sub(): """ Creates a subtraction """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_sub(x, y)) @nr.route('/eqs/create', methods=['POST']) def 
create_eq(): """ Creates an equality """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_eq(x, y)) @nr.route('/leqs/create', methods=['POST']) def create_leq(): """ Creates an less or equal """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_leq(x, y)) @nr.route('/geqs/create', methods=['POST']) def create_geq(): """ Creates a greater or equal """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_geq(x, y)) @nr.route('/lts/create', methods=['POST']) def create_lt(): """ Creates a less than """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_lt(x, y)) @nr.route('/gts/create', methods=['POST']) def create_gt(): """ Creates a greater than """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_gt(x, y)) @nr.route('/neqs/create', methods=['POST']) def create_neq(): """ Creates a not equal """ return _create_binary_gate(lambda ctx, x, y : ctx.mk_neq(x, y)) @nr.route('/ites/create', methods=['POST']) def create_ite(): """ Creates an if then else """ context = request.get_json()['context'] x = request.get_json()['x'] y = request.get_json()['y'] z = request.get_json()['z'] if context is None or x is None or y is None or z is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] i = ctx.nets[x] t = ctx.nets[y] e = ctx.nets[z] assert i is not None assert t is not None assert e is not None net = ctx.mk_ite(i, t, e) return {'result': ctx.net2name[net]}, 201 @nr.route('/casts/create', methods=['POST']) def create_cast(): """ Creates a type cast """ context = request.get_json()['context'] x = request.get_json()['x'] t = request.get_json()['type'] if context is None or x is None or t is None: return {'result': 'error'}, 400 ctx = contexts[context]['context'] x = ctx.nets[x] assert ctx is not None assert x is not None net = None if t == 'int8': net = ctx.mk_cast_to_int8(x) elif t == 'int16': net = ctx.mk_cast_to_int16(x) elif t == 'int32': net = ctx.mk_cast_to_int32(x) elif t == 'int64': net = ctx.mk_cast_to_int64(x) elif t == 'uint8': net = 
ctx.mk_cast_to_uint8(x) elif t == 'uint16': net = ctx.mk_cast_to_uint16(x) elif t == 'uint32': net = ctx.mk_cast_to_uint32(x) elif t == 'uint64': net = ctx.mk_cast_to_uint64(x) else: return {'result': 'unhandled type {}'.format(t)}, 400 assert net is not None return {'result': ctx.net2name[net]}, 201
en
0.707029
Implementation of REST API for nets creation Gets the list of the available nets Creates the net true Creates the net false Creates a number Creates a logical not Creates an arithmetic minus Creates a logical and Creates a logical or Creates a logical implies Creates a logical xor Creates a logical iff Creates an addition Creates a multiplication Creates a division Creates a modulus Creates a subtraction Creates an equality Creates an less or equal Creates a greater or equal Creates a less than Creates a greater than Creates a not equal Creates an if then else Creates a type cast
2.647399
3
tensorflow_addons/image/utils.py
Soroosh129/addons
1
9864
<gh_stars>1-10 # Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Image util ops.""" import tensorflow as tf def get_ndims(image): return image.get_shape().ndims or tf.rank(image) def to_4D_image(image): """Convert 2/3/4D image to 4D image. Args: image: 2/3/4D tensor. Returns: 4D tensor with the same type. """ with tf.control_dependencies( [ tf.debugging.assert_rank_in( image, [2, 3, 4], message="`image` must be 2/3/4D tensor" ) ] ): ndims = image.get_shape().ndims if ndims is None: return _dynamic_to_4D_image(image) elif ndims == 2: return image[None, :, :, None] elif ndims == 3: return image[None, :, :, :] else: return image def _dynamic_to_4D_image(image): shape = tf.shape(image) original_rank = tf.rank(image) # 4D image => [N, H, W, C] or [N, C, H, W] # 3D image => [1, H, W, C] or [1, C, H, W] # 2D image => [1, H, W, 1] left_pad = tf.cast(tf.less_equal(original_rank, 3), dtype=tf.int32) right_pad = tf.cast(tf.equal(original_rank, 2), dtype=tf.int32) new_shape = tf.concat( [ tf.ones(shape=left_pad, dtype=tf.int32), shape, tf.ones(shape=right_pad, dtype=tf.int32), ], axis=0, ) return tf.reshape(image, new_shape) def from_4D_image(image, ndims): """Convert back to an image with `ndims` rank. Args: image: 4D tensor. ndims: The original rank of the image. Returns: `ndims`-D tensor with the same type. 
""" with tf.control_dependencies( [tf.debugging.assert_rank(image, 4, message="`image` must be 4D tensor")] ): if isinstance(ndims, tf.Tensor): return _dynamic_from_4D_image(image, ndims) elif ndims == 2: return tf.squeeze(image, [0, 3]) elif ndims == 3: return tf.squeeze(image, [0]) else: return image def _dynamic_from_4D_image(image, original_rank): shape = tf.shape(image) # 4D image <= [N, H, W, C] or [N, C, H, W] # 3D image <= [1, H, W, C] or [1, C, H, W] # 2D image <= [1, H, W, 1] begin = tf.cast(tf.less_equal(original_rank, 3), dtype=tf.int32) end = 4 - tf.cast(tf.equal(original_rank, 2), dtype=tf.int32) new_shape = shape[begin:end] return tf.reshape(image, new_shape)
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Image util ops.""" import tensorflow as tf def get_ndims(image): return image.get_shape().ndims or tf.rank(image) def to_4D_image(image): """Convert 2/3/4D image to 4D image. Args: image: 2/3/4D tensor. Returns: 4D tensor with the same type. """ with tf.control_dependencies( [ tf.debugging.assert_rank_in( image, [2, 3, 4], message="`image` must be 2/3/4D tensor" ) ] ): ndims = image.get_shape().ndims if ndims is None: return _dynamic_to_4D_image(image) elif ndims == 2: return image[None, :, :, None] elif ndims == 3: return image[None, :, :, :] else: return image def _dynamic_to_4D_image(image): shape = tf.shape(image) original_rank = tf.rank(image) # 4D image => [N, H, W, C] or [N, C, H, W] # 3D image => [1, H, W, C] or [1, C, H, W] # 2D image => [1, H, W, 1] left_pad = tf.cast(tf.less_equal(original_rank, 3), dtype=tf.int32) right_pad = tf.cast(tf.equal(original_rank, 2), dtype=tf.int32) new_shape = tf.concat( [ tf.ones(shape=left_pad, dtype=tf.int32), shape, tf.ones(shape=right_pad, dtype=tf.int32), ], axis=0, ) return tf.reshape(image, new_shape) def from_4D_image(image, ndims): """Convert back to an image with `ndims` rank. Args: image: 4D tensor. ndims: The original rank of the image. Returns: `ndims`-D tensor with the same type. 
""" with tf.control_dependencies( [tf.debugging.assert_rank(image, 4, message="`image` must be 4D tensor")] ): if isinstance(ndims, tf.Tensor): return _dynamic_from_4D_image(image, ndims) elif ndims == 2: return tf.squeeze(image, [0, 3]) elif ndims == 3: return tf.squeeze(image, [0]) else: return image def _dynamic_from_4D_image(image, original_rank): shape = tf.shape(image) # 4D image <= [N, H, W, C] or [N, C, H, W] # 3D image <= [1, H, W, C] or [1, C, H, W] # 2D image <= [1, H, W, 1] begin = tf.cast(tf.less_equal(original_rank, 3), dtype=tf.int32) end = 4 - tf.cast(tf.equal(original_rank, 2), dtype=tf.int32) new_shape = shape[begin:end] return tf.reshape(image, new_shape)
en
0.792074
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== Image util ops. Convert 2/3/4D image to 4D image. Args: image: 2/3/4D tensor. Returns: 4D tensor with the same type. # 4D image => [N, H, W, C] or [N, C, H, W] # 3D image => [1, H, W, C] or [1, C, H, W] # 2D image => [1, H, W, 1] Convert back to an image with `ndims` rank. Args: image: 4D tensor. ndims: The original rank of the image. Returns: `ndims`-D tensor with the same type. # 4D image <= [N, H, W, C] or [N, C, H, W] # 3D image <= [1, H, W, C] or [1, C, H, W] # 2D image <= [1, H, W, 1]
2.354988
2
tests/test_charge.py
fossabot/MolVS
1
9865
<gh_stars>1-10 #!/usr/bin/env python # -*- coding: utf-8 -*- """Tests for charge.py""" from __future__ import print_function from __future__ import unicode_literals from __future__ import division import logging from rdkit import Chem from molvs.standardize import Standardizer, standardize_smiles from molvs.charge import Reionizer logging.basicConfig(level=logging.DEBUG) def charge_parent_smiles(smiles, prefer_organic=False): """Utility function that returns the charge parent SMILES for given a SMILES string.""" mol = Chem.MolFromSmiles(smiles.encode('utf8'), sanitize=False) mol = Standardizer(prefer_organic=prefer_organic).charge_parent(mol) if mol: return Chem.MolToSmiles(mol, isomericSmiles=True) def test_charge_parent(): """Test neutralization of ionized acids and bases.""" assert charge_parent_smiles('C(C(=O)[O-])(Cc1n[n-]nn1)(C[NH3+])(C[N+](=O)[O-])') == 'NCC(Cc1nn[nH]n1)(C[N+](=O)[O-])C(=O)O' def test_charge_parent2(): """Test preservation of zwitterion.""" assert charge_parent_smiles('n(C)1cc[n+]2cccc([O-])c12') == 'Cn1cc[n+]2cccc([O-])c12' def test_charge_parent3(): """Choline should be left with a positive charge.""" assert charge_parent_smiles('C[N+](C)(C)CCO') == 'C[N+](C)(C)CCO' def test_charge_parent4(): """This should have the hydrogen removed to give deanol as a charge parent.""" assert charge_parent_smiles('C[NH+](C)CCO') == 'CN(C)CCO' def test_charge_parent5(): """Sodium benzoate to benzoic acid.""" assert charge_parent_smiles('[Na+].O=C([O-])c1ccccc1') == 'O=C(O)c1ccccc1' def test_charge_parent6(): """Benzoate ion to benzoic acid.""" assert charge_parent_smiles('O=C([O-])c1ccccc1') == 'O=C(O)c1ccccc1' def test_charge_parent7(): """Charges in histidine should be neutralized.""" assert charge_parent_smiles('[NH3+]C(Cc1cnc[nH]1)C(=O)[O-]') == 'NC(Cc1cnc[nH]1)C(=O)O' def test_charge_parent8(): """""" assert charge_parent_smiles('C[NH+](C)(C).[Cl-]') == 'CN(C)C' def test_charge_parent9(): """No organic fragments.""" assert 
charge_parent_smiles('[N+](=O)([O-])[O-]') == 'O=[N+]([O-])[O-]' def test_charge_parent10(): """No organic fragments.""" assert charge_parent_smiles('[N+](=O)([O-])[O-]', prefer_organic=True) == 'O=[N+]([O-])[O-]' def test_charge_parent11(): """Larger inorganic fragment should be chosen.""" assert charge_parent_smiles('[N+](=O)([O-])[O-].[CH2]') == 'O=[N+]([O-])[O-]' def test_charge_parent12(): """Smaller organic fragment should be chosen over larger inorganic fragment.""" assert charge_parent_smiles('[N+](=O)([O-])[O-].[CH2]', prefer_organic=True) == '[CH2]' def test_standardize(): """Test table salt.""" assert standardize_smiles('[Na].[Cl]') == '[Cl-].[Na+]' def test_reionize(): """Test reionizer moves proton to weaker acid.""" mol = Chem.MolFromSmiles('C1=C(C=CC(=C1)[S]([O-])=O)[S](O)(=O)=O') r = Reionizer() mol = r.reionize(mol) assert Chem.MolToSmiles(mol) == 'O=S(O)c1ccc(S(=O)(=O)[O-])cc1' def test_reionize2(): """Test charged carbon doesn't get recognised as alpha-carbon-hydrogen-keto.""" mol = Chem.MolFromSmiles('CCOC(=O)C(=O)[CH-]C#N') r = Reionizer() mol = r.reionize(mol) assert Chem.MolToSmiles(mol) == 'CCOC(=O)C(=O)[CH-]C#N' def test_reionize3(): """""" mol = Chem.MolFromSmiles('C[N+]1=C[CH-]N(C(=N)N)/C1=C/[N+](=O)[O-]') r = Reionizer() mol = r.reionize(mol) assert Chem.MolToSmiles(mol) == 'C[N+]1=CCN(C(=N)N)C1=[C-][N+](=O)[O-]' def test_should_complete(): """Reionization should not infinitely loop forever on these molecules.""" # GitHub Issue #14 assert standardize_smiles('CCCCCCCCCCCCCCCCCC(=O)CC(=C)C(=O)O[Ti](=O)(OC(C)C)C(C)C') == 'C=C(CC(=O)[CH-]CCCCCCCCCCCCCCCC)C(=O)[O-].CC(C)[O-].CCC.[O-2].[Ti+5]' assert standardize_smiles('OP(=O)(O)[O-].OP(=O)([O-])[O-].[O-]S(=O)(=O)[O-].[Na+].[Na+].[Na+].[Mg+2].[Cl-].[Cl-].[K+].[K+]') == 'O=P([O-])(O)O.O=P([O-])([O-])O.O=S(=O)([O-])[O-].[Cl-].[Cl-].[K+].[K+].[Mg+2].[Na+].[Na+].[Na+]' def test_forced_charge1(): """Test forced charge correction maintaining overall neutral charge.""" assert 
standardize_smiles('[Na].O=C(O)c1ccccc1') == 'O=C([O-])c1ccccc1.[Na+]' def test_forced_charge2(): """Test forced charge correction with no corresponding proton for neutralization.""" # GitHub Issue #15 assert standardize_smiles('[Na].[Na]') == '[Na+].[Na+]' # TODO: Arguably should become selenite ion... O=[Se]([O-])[O-]. Need an AcidBasePair? assert standardize_smiles('[Na].[Na].O[Se](O)=O') == 'O=[Se](O)O.[Na+].[Na+]' # def test_reionize3(): # """Test canonical ionization position when multiple equivalent possibilities.""" # mol = Chem.MolFromSmiles('CC1=CC(=CC=C1S(O)=O)S([O-])=O') # mol2 = Chem.MolFromSmiles('CC1=CC(=CC=C1S([O-])=O)S(O)=O') # r = Reionizer() # mol = r.reionize(mol) # mol2 = r.reionize(mol2) # assert Chem.MolToSmiles(mol) == 'Cc1cc(S(=O)[O-])ccc1S(=O)O' # assert Chem.MolToSmiles(mol2) == 'Cc1cc(S(=O)[O-])ccc1S(=O)O' # assert Chem.MolToSmiles(mol) == Chem.MolToSmiles(mol2) # # # def test_reionize4(): # """Test canonical ionization position when multiple equivalent possibilities.""" # mol = Chem.MolFromSmiles('CCOC(=O)C(=O)[CH-]C#N') # mol2 = Chem.MolFromSmiles('[CH2-]COC(=O)C(=O)CC#N') # r = Reionizer() # mol = r.reionize(mol) # mol2 = r.reionize(mol2) # assert Chem.MolToSmiles(mol) == '[CH2-]COC(=O)C(=O)CC#N' # assert Chem.MolToSmiles(mol2) == '' # assert Chem.MolToSmiles(mol) == Chem.MolToSmiles(mol2)
#!/usr/bin/env python # -*- coding: utf-8 -*- """Tests for charge.py""" from __future__ import print_function from __future__ import unicode_literals from __future__ import division import logging from rdkit import Chem from molvs.standardize import Standardizer, standardize_smiles from molvs.charge import Reionizer logging.basicConfig(level=logging.DEBUG) def charge_parent_smiles(smiles, prefer_organic=False): """Utility function that returns the charge parent SMILES for given a SMILES string.""" mol = Chem.MolFromSmiles(smiles.encode('utf8'), sanitize=False) mol = Standardizer(prefer_organic=prefer_organic).charge_parent(mol) if mol: return Chem.MolToSmiles(mol, isomericSmiles=True) def test_charge_parent(): """Test neutralization of ionized acids and bases.""" assert charge_parent_smiles('C(C(=O)[O-])(Cc1n[n-]nn1)(C[NH3+])(C[N+](=O)[O-])') == 'NCC(Cc1nn[nH]n1)(C[N+](=O)[O-])C(=O)O' def test_charge_parent2(): """Test preservation of zwitterion.""" assert charge_parent_smiles('n(C)1cc[n+]2cccc([O-])c12') == 'Cn1cc[n+]2cccc([O-])c12' def test_charge_parent3(): """Choline should be left with a positive charge.""" assert charge_parent_smiles('C[N+](C)(C)CCO') == 'C[N+](C)(C)CCO' def test_charge_parent4(): """This should have the hydrogen removed to give deanol as a charge parent.""" assert charge_parent_smiles('C[NH+](C)CCO') == 'CN(C)CCO' def test_charge_parent5(): """Sodium benzoate to benzoic acid.""" assert charge_parent_smiles('[Na+].O=C([O-])c1ccccc1') == 'O=C(O)c1ccccc1' def test_charge_parent6(): """Benzoate ion to benzoic acid.""" assert charge_parent_smiles('O=C([O-])c1ccccc1') == 'O=C(O)c1ccccc1' def test_charge_parent7(): """Charges in histidine should be neutralized.""" assert charge_parent_smiles('[NH3+]C(Cc1cnc[nH]1)C(=O)[O-]') == 'NC(Cc1cnc[nH]1)C(=O)O' def test_charge_parent8(): """""" assert charge_parent_smiles('C[NH+](C)(C).[Cl-]') == 'CN(C)C' def test_charge_parent9(): """No organic fragments.""" assert charge_parent_smiles('[N+](=O)([O-])[O-]') 
== 'O=[N+]([O-])[O-]' def test_charge_parent10(): """No organic fragments.""" assert charge_parent_smiles('[N+](=O)([O-])[O-]', prefer_organic=True) == 'O=[N+]([O-])[O-]' def test_charge_parent11(): """Larger inorganic fragment should be chosen.""" assert charge_parent_smiles('[N+](=O)([O-])[O-].[CH2]') == 'O=[N+]([O-])[O-]' def test_charge_parent12(): """Smaller organic fragment should be chosen over larger inorganic fragment.""" assert charge_parent_smiles('[N+](=O)([O-])[O-].[CH2]', prefer_organic=True) == '[CH2]' def test_standardize(): """Test table salt.""" assert standardize_smiles('[Na].[Cl]') == '[Cl-].[Na+]' def test_reionize(): """Test reionizer moves proton to weaker acid.""" mol = Chem.MolFromSmiles('C1=C(C=CC(=C1)[S]([O-])=O)[S](O)(=O)=O') r = Reionizer() mol = r.reionize(mol) assert Chem.MolToSmiles(mol) == 'O=S(O)c1ccc(S(=O)(=O)[O-])cc1' def test_reionize2(): """Test charged carbon doesn't get recognised as alpha-carbon-hydrogen-keto.""" mol = Chem.MolFromSmiles('CCOC(=O)C(=O)[CH-]C#N') r = Reionizer() mol = r.reionize(mol) assert Chem.MolToSmiles(mol) == 'CCOC(=O)C(=O)[CH-]C#N' def test_reionize3(): """""" mol = Chem.MolFromSmiles('C[N+]1=C[CH-]N(C(=N)N)/C1=C/[N+](=O)[O-]') r = Reionizer() mol = r.reionize(mol) assert Chem.MolToSmiles(mol) == 'C[N+]1=CCN(C(=N)N)C1=[C-][N+](=O)[O-]' def test_should_complete(): """Reionization should not infinitely loop forever on these molecules.""" # GitHub Issue #14 assert standardize_smiles('CCCCCCCCCCCCCCCCCC(=O)CC(=C)C(=O)O[Ti](=O)(OC(C)C)C(C)C') == 'C=C(CC(=O)[CH-]CCCCCCCCCCCCCCCC)C(=O)[O-].CC(C)[O-].CCC.[O-2].[Ti+5]' assert standardize_smiles('OP(=O)(O)[O-].OP(=O)([O-])[O-].[O-]S(=O)(=O)[O-].[Na+].[Na+].[Na+].[Mg+2].[Cl-].[Cl-].[K+].[K+]') == 'O=P([O-])(O)O.O=P([O-])([O-])O.O=S(=O)([O-])[O-].[Cl-].[Cl-].[K+].[K+].[Mg+2].[Na+].[Na+].[Na+]' def test_forced_charge1(): """Test forced charge correction maintaining overall neutral charge.""" assert standardize_smiles('[Na].O=C(O)c1ccccc1') == 
'O=C([O-])c1ccccc1.[Na+]' def test_forced_charge2(): """Test forced charge correction with no corresponding proton for neutralization.""" # GitHub Issue #15 assert standardize_smiles('[Na].[Na]') == '[Na+].[Na+]' # TODO: Arguably should become selenite ion... O=[Se]([O-])[O-]. Need an AcidBasePair? assert standardize_smiles('[Na].[Na].O[Se](O)=O') == 'O=[Se](O)O.[Na+].[Na+]' # def test_reionize3(): # """Test canonical ionization position when multiple equivalent possibilities.""" # mol = Chem.MolFromSmiles('CC1=CC(=CC=C1S(O)=O)S([O-])=O') # mol2 = Chem.MolFromSmiles('CC1=CC(=CC=C1S([O-])=O)S(O)=O') # r = Reionizer() # mol = r.reionize(mol) # mol2 = r.reionize(mol2) # assert Chem.MolToSmiles(mol) == 'Cc1cc(S(=O)[O-])ccc1S(=O)O' # assert Chem.MolToSmiles(mol2) == 'Cc1cc(S(=O)[O-])ccc1S(=O)O' # assert Chem.MolToSmiles(mol) == Chem.MolToSmiles(mol2) # # # def test_reionize4(): # """Test canonical ionization position when multiple equivalent possibilities.""" # mol = Chem.MolFromSmiles('CCOC(=O)C(=O)[CH-]C#N') # mol2 = Chem.MolFromSmiles('[CH2-]COC(=O)C(=O)CC#N') # r = Reionizer() # mol = r.reionize(mol) # mol2 = r.reionize(mol2) # assert Chem.MolToSmiles(mol) == '[CH2-]COC(=O)C(=O)CC#N' # assert Chem.MolToSmiles(mol2) == '' # assert Chem.MolToSmiles(mol) == Chem.MolToSmiles(mol2)
en
0.55616
#!/usr/bin/env python # -*- coding: utf-8 -*- Tests for charge.py Utility function that returns the charge parent SMILES for given a SMILES string. Test neutralization of ionized acids and bases. Test preservation of zwitterion. Choline should be left with a positive charge. This should have the hydrogen removed to give deanol as a charge parent. Sodium benzoate to benzoic acid. Benzoate ion to benzoic acid. Charges in histidine should be neutralized. No organic fragments. No organic fragments. Larger inorganic fragment should be chosen. Smaller organic fragment should be chosen over larger inorganic fragment. Test table salt. Test reionizer moves proton to weaker acid. Test charged carbon doesn't get recognised as alpha-carbon-hydrogen-keto. #N') #N' Reionization should not infinitely loop forever on these molecules. # GitHub Issue #14 Test forced charge correction maintaining overall neutral charge. Test forced charge correction with no corresponding proton for neutralization. # GitHub Issue #15 # TODO: Arguably should become selenite ion... O=[Se]([O-])[O-]. Need an AcidBasePair? 
# def test_reionize3(): # """Test canonical ionization position when multiple equivalent possibilities.""" # mol = Chem.MolFromSmiles('CC1=CC(=CC=C1S(O)=O)S([O-])=O') # mol2 = Chem.MolFromSmiles('CC1=CC(=CC=C1S([O-])=O)S(O)=O') # r = Reionizer() # mol = r.reionize(mol) # mol2 = r.reionize(mol2) # assert Chem.MolToSmiles(mol) == 'Cc1cc(S(=O)[O-])ccc1S(=O)O' # assert Chem.MolToSmiles(mol2) == 'Cc1cc(S(=O)[O-])ccc1S(=O)O' # assert Chem.MolToSmiles(mol) == Chem.MolToSmiles(mol2) # # # def test_reionize4(): # """Test canonical ionization position when multiple equivalent possibilities.""" # mol = Chem.MolFromSmiles('CCOC(=O)C(=O)[CH-]C#N') # mol2 = Chem.MolFromSmiles('[CH2-]COC(=O)C(=O)CC#N') # r = Reionizer() # mol = r.reionize(mol) # mol2 = r.reionize(mol2) # assert Chem.MolToSmiles(mol) == '[CH2-]COC(=O)C(=O)CC#N' # assert Chem.MolToSmiles(mol2) == '' # assert Chem.MolToSmiles(mol) == Chem.MolToSmiles(mol2)
2.385386
2
backend/users/views.py
jochanmin/Blog
11
9866
<reponame>jochanmin/Blog from django.shortcuts import render from django.core import serializers from .models import User from django.forms.models import model_to_dict from rest_framework import status from rest_framework.response import Response from rest_framework.decorators import api_view, permission_classes from rest_framework.permissions import IsAuthenticated #회원가입 /users/auth/ #아이디를 등록하는곳 /users/register @api_view(['POST']) def register(request): data=request.data if all(i in data for i in ('email','nickname','password')): email_check=User.objects.filter(email=data['email']) nick_check=User.objects.filter(nickname=data['nickname']) if email_check.exists(): return Response({"message": "email already exists"}, status=status.HTTP_409_CONFLICT) elif nick_check.exists(): return Response({"message": "nickname already exists"}, status=status.HTTP_409_CONFLICT) else: user = User.objects.create_user( data['email'], data['nickname'], data['password'], ) user.save() return Response(model_to_dict(user), status=status.HTTP_201_CREATED) else: return Response({"message": "key error"}, status=status.HTTP_400_BAD_REQUEST) # 토큰을 주면 해당 유저의 정보를 얻는 곳 /users/users @api_view(['GET']) @permission_classes((IsAuthenticated,)) def info(request): user = request.user data = request.data try: searchU=User.objects.filter(email=user.email) if searchU.count==0: return Response({"message": "Can't find info"}, status=status.HTTP_404_NOT_FOUND) data = { 'email': user.email, 'nickname':user.nickname } return Response((data), status=status.HTTP_200_OK) except User.DoesNotExist: return Response({"message": "info does not exists"}, status=status.HTTP_404_NOT_FOUND)
from django.shortcuts import render from django.core import serializers from .models import User from django.forms.models import model_to_dict from rest_framework import status from rest_framework.response import Response from rest_framework.decorators import api_view, permission_classes from rest_framework.permissions import IsAuthenticated #회원가입 /users/auth/ #아이디를 등록하는곳 /users/register @api_view(['POST']) def register(request): data=request.data if all(i in data for i in ('email','nickname','password')): email_check=User.objects.filter(email=data['email']) nick_check=User.objects.filter(nickname=data['nickname']) if email_check.exists(): return Response({"message": "email already exists"}, status=status.HTTP_409_CONFLICT) elif nick_check.exists(): return Response({"message": "nickname already exists"}, status=status.HTTP_409_CONFLICT) else: user = User.objects.create_user( data['email'], data['nickname'], data['password'], ) user.save() return Response(model_to_dict(user), status=status.HTTP_201_CREATED) else: return Response({"message": "key error"}, status=status.HTTP_400_BAD_REQUEST) # 토큰을 주면 해당 유저의 정보를 얻는 곳 /users/users @api_view(['GET']) @permission_classes((IsAuthenticated,)) def info(request): user = request.user data = request.data try: searchU=User.objects.filter(email=user.email) if searchU.count==0: return Response({"message": "Can't find info"}, status=status.HTTP_404_NOT_FOUND) data = { 'email': user.email, 'nickname':user.nickname } return Response((data), status=status.HTTP_200_OK) except User.DoesNotExist: return Response({"message": "info does not exists"}, status=status.HTTP_404_NOT_FOUND)
ko
0.982314
#회원가입 /users/auth/ #아이디를 등록하는곳 /users/register # 토큰을 주면 해당 유저의 정보를 얻는 곳 /users/users
2.054309
2
src/test/test_Location.py
MrRollyPanda/astral
0
9867
# -*- coding: utf-8 -*- from pytest import raises from astral import Astral, AstralError, Location import datetime import pytz def datetime_almost_equal(datetime1, datetime2, seconds=60): dd = datetime1 - datetime2 sd = (dd.days * 24 * 60 * 60) + dd.seconds return abs(sd) <= seconds def test_Location_Name(): c = Location() assert c.name == 'Greenwich' c.name = 'London' assert c.name == 'London' c.name = 'Köln' assert c.name == 'Köln' def test_Location_Country(): c = Location() assert c.region == 'England' c.region = 'Australia' assert c.region == 'Australia' def test_Location_Elevation(): dd = Astral() c = dd['London'] assert c.elevation == 24 def test_Location_TimezoneName(): c = Location() assert c.timezone == 'Europe/London' c.name = 'Asia/Riyadh' assert c.name == 'Asia/Riyadh' def test_Location_TimezoneNameNoLocation(): c = Location() c._timezone_group = 'Europe' c._timezone_location = '' assert c.timezone == 'Europe' def test_Location_TimezoneNameBad(): c = Location() with raises(ValueError): c.timezone = 'bad/timezone' def test_Location_TimezoneLookup(): c = Location() assert c.tz == pytz.timezone('Europe/London') c.timezone='Europe/Stockholm' assert c.tz == pytz.timezone('Europe/Stockholm') def test_Location_TimezoneLookupBad(): c = Location() c._timezone_group = 'bad' c._timezone_location = 'timezone' with raises(AstralError): c.tz def test_Location_Sun(): c = Location() c.sun() def test_Location_Dawn(): c = Location() c.dawn() def test_Location_DawnUTC(): c = Location() c.dawn(local=False) def test_Location_Sunrise(): c = Location() c.sunrise() def test_Location_SunriseUTC(): c = Location() c.sunrise(local=False) def test_Location_SolarNoon(): c = Location() c.solar_noon() def test_Location_SolarNoonUTC(): c = Location() c.solar_noon(local=False) def test_Location_Dusk(): c = Location() c.dusk() def test_Location_DuskUTC(): c = Location() c.dusk(local=False) def test_Location_Sunset(): c = Location() c.sunset() def test_Location_SunsetUTC(): c = Location() 
c.sunset(local=False) def test_Location_SolarElevation(): dd = Astral() location = dd['Riyadh'] dt = datetime.datetime(2015, 12, 14, 8, 0, 0) dt = location.tz.localize(dt) elevation = location.solar_elevation(dt) assert abs(elevation - 17) < 0.5 def test_Location_SolarAzimuth(): dd = Astral() location = dd['Riyadh'] dt = datetime.datetime(2015, 12, 14, 8, 0, 0) dt = location.tz.localize(dt) azimuth = location.solar_azimuth(dt) assert abs(azimuth - 126) < 0.5 def test_Location_TimeAtElevation(): dd = Astral() location = dd['New Delhi'] test_data = { datetime.date(2016, 1, 5): datetime.datetime(2016, 1, 5, 10, 0), } for day, cdt in test_data.items(): cdt = location.tz.localize(cdt) dt = location.time_at_elevation(28, date=day) assert datetime_almost_equal(dt, cdt, seconds=600) def test_Location_SolarDepression(): c = Location(("Heidelberg", "Germany", 49.412, -8.71, "Europe/Berlin")) c.solar_depression = 'nautical' assert c.solar_depression == 12 c.solar_depression = 18 assert c.solar_depression == 18 def test_Location_Moon(): d = datetime.date(2017, 12, 1) c=Location() assert c.moon_phase(date=d) == 11 def test_Location_TzError(): with raises(AttributeError): c = Location() c.tz = 1 def test_Location_equality(): c1 = Location() c2 = Location() t = (c1, c2) assert c1 == c2 assert len(set(t)) == 1 c1 = Location(["Oslo", "Norway", 59.9, 10.7, "Europe/Oslo", 0]) c2 = Location(["Oslo", "Norway", 59.9, 10.7, "Europe/Oslo", 0]) c3 = Location(["Stockholm", "Sweden", 59.3, 18, "Europe/Stockholm", 0]) t1 = (c1, c2) t2 = (c1, c3) assert c1 == c2 assert len(set(t1)) == 1 assert c1 != c3 assert len(set(t2)) == 2
# -*- coding: utf-8 -*- from pytest import raises from astral import Astral, AstralError, Location import datetime import pytz def datetime_almost_equal(datetime1, datetime2, seconds=60): dd = datetime1 - datetime2 sd = (dd.days * 24 * 60 * 60) + dd.seconds return abs(sd) <= seconds def test_Location_Name(): c = Location() assert c.name == 'Greenwich' c.name = 'London' assert c.name == 'London' c.name = 'Köln' assert c.name == 'Köln' def test_Location_Country(): c = Location() assert c.region == 'England' c.region = 'Australia' assert c.region == 'Australia' def test_Location_Elevation(): dd = Astral() c = dd['London'] assert c.elevation == 24 def test_Location_TimezoneName(): c = Location() assert c.timezone == 'Europe/London' c.name = 'Asia/Riyadh' assert c.name == 'Asia/Riyadh' def test_Location_TimezoneNameNoLocation(): c = Location() c._timezone_group = 'Europe' c._timezone_location = '' assert c.timezone == 'Europe' def test_Location_TimezoneNameBad(): c = Location() with raises(ValueError): c.timezone = 'bad/timezone' def test_Location_TimezoneLookup(): c = Location() assert c.tz == pytz.timezone('Europe/London') c.timezone='Europe/Stockholm' assert c.tz == pytz.timezone('Europe/Stockholm') def test_Location_TimezoneLookupBad(): c = Location() c._timezone_group = 'bad' c._timezone_location = 'timezone' with raises(AstralError): c.tz def test_Location_Sun(): c = Location() c.sun() def test_Location_Dawn(): c = Location() c.dawn() def test_Location_DawnUTC(): c = Location() c.dawn(local=False) def test_Location_Sunrise(): c = Location() c.sunrise() def test_Location_SunriseUTC(): c = Location() c.sunrise(local=False) def test_Location_SolarNoon(): c = Location() c.solar_noon() def test_Location_SolarNoonUTC(): c = Location() c.solar_noon(local=False) def test_Location_Dusk(): c = Location() c.dusk() def test_Location_DuskUTC(): c = Location() c.dusk(local=False) def test_Location_Sunset(): c = Location() c.sunset() def test_Location_SunsetUTC(): c = Location() 
c.sunset(local=False) def test_Location_SolarElevation(): dd = Astral() location = dd['Riyadh'] dt = datetime.datetime(2015, 12, 14, 8, 0, 0) dt = location.tz.localize(dt) elevation = location.solar_elevation(dt) assert abs(elevation - 17) < 0.5 def test_Location_SolarAzimuth(): dd = Astral() location = dd['Riyadh'] dt = datetime.datetime(2015, 12, 14, 8, 0, 0) dt = location.tz.localize(dt) azimuth = location.solar_azimuth(dt) assert abs(azimuth - 126) < 0.5 def test_Location_TimeAtElevation(): dd = Astral() location = dd['New Delhi'] test_data = { datetime.date(2016, 1, 5): datetime.datetime(2016, 1, 5, 10, 0), } for day, cdt in test_data.items(): cdt = location.tz.localize(cdt) dt = location.time_at_elevation(28, date=day) assert datetime_almost_equal(dt, cdt, seconds=600) def test_Location_SolarDepression(): c = Location(("Heidelberg", "Germany", 49.412, -8.71, "Europe/Berlin")) c.solar_depression = 'nautical' assert c.solar_depression == 12 c.solar_depression = 18 assert c.solar_depression == 18 def test_Location_Moon(): d = datetime.date(2017, 12, 1) c=Location() assert c.moon_phase(date=d) == 11 def test_Location_TzError(): with raises(AttributeError): c = Location() c.tz = 1 def test_Location_equality(): c1 = Location() c2 = Location() t = (c1, c2) assert c1 == c2 assert len(set(t)) == 1 c1 = Location(["Oslo", "Norway", 59.9, 10.7, "Europe/Oslo", 0]) c2 = Location(["Oslo", "Norway", 59.9, 10.7, "Europe/Oslo", 0]) c3 = Location(["Stockholm", "Sweden", 59.3, 18, "Europe/Stockholm", 0]) t1 = (c1, c2) t2 = (c1, c3) assert c1 == c2 assert len(set(t1)) == 1 assert c1 != c3 assert len(set(t2)) == 2
en
0.769321
# -*- coding: utf-8 -*-
2.464534
2
__init__.py
minjunli/jsonc
2
9868
from .jsonc import load, loads, dump, dumps
from .jsonc import load, loads, dump, dumps
none
1
1.081979
1
specs/d3d9caps.py
prahal/apitrace
1
9869
########################################################################## # # Copyright 2008-2009 VMware, Inc. # All Rights Reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ##########################################################################/ """d3d9caps.h""" from winapi import * from d3d9types import * D3DVS20CAPS = Flags(DWORD, [ "D3DVS20CAPS_PREDICATION", ]) D3DVSHADERCAPS2_0 = Struct("D3DVSHADERCAPS2_0", [ (D3DVS20CAPS, "Caps"), (INT, "DynamicFlowControlDepth"), (INT, "NumTemps"), (INT, "StaticFlowControlDepth"), ]) D3DPS20CAPS = Flags(DWORD, [ "D3DPS20CAPS_ARBITRARYSWIZZLE", "D3DPS20CAPS_GRADIENTINSTRUCTIONS", "D3DPS20CAPS_PREDICATION", "D3DPS20CAPS_NODEPENDENTREADLIMIT", "D3DPS20CAPS_NOTEXINSTRUCTIONLIMIT", ]) D3DPSHADERCAPS2_0 = Struct("D3DPSHADERCAPS2_0", [ (D3DPS20CAPS, "Caps"), (INT, "DynamicFlowControlDepth"), (INT, "NumTemps"), (INT, "StaticFlowControlDepth"), (INT, "NumInstructionSlots"), ]) D3DCAPS = Flags(DWORD, [ "D3DCAPS_READ_SCANLINE", ]) D3DCAPS2 = Flags(DWORD, [ "D3DCAPS2_FULLSCREENGAMMA", "D3DCAPS2_CANCALIBRATEGAMMA", "D3DCAPS2_RESERVED", "D3DCAPS2_CANMANAGERESOURCE", "D3DCAPS2_DYNAMICTEXTURES", "D3DCAPS2_CANAUTOGENMIPMAP", "D3DCAPS2_CANSHARERESOURCE", ]) D3DCAPS3 = Flags(DWORD, [ "D3DCAPS3_RESERVED", "D3DCAPS3_ALPHA_FULLSCREEN_FLIP_OR_DISCARD", "D3DCAPS3_LINEAR_TO_SRGB_PRESENTATION", "D3DCAPS3_COPY_TO_VIDMEM", "D3DCAPS3_COPY_TO_SYSTEMMEM", ]) D3DPRESENT_INTERVAL = Flags(DWORD, [ #"D3DPRESENT_INTERVAL_DEFAULT", # 0 "D3DPRESENT_INTERVAL_ONE", "D3DPRESENT_INTERVAL_TWO", "D3DPRESENT_INTERVAL_THREE", "D3DPRESENT_INTERVAL_FOUR", "D3DPRESENT_INTERVAL_IMMEDIATE", ]) D3DCURSORCAPS = Flags(DWORD, [ "D3DCURSORCAPS_COLOR", "D3DCURSORCAPS_LOWRES", ]) D3DDEVCAPS = Flags(DWORD, [ "D3DDEVCAPS_EXECUTESYSTEMMEMORY", "D3DDEVCAPS_EXECUTEVIDEOMEMORY", "D3DDEVCAPS_TLVERTEXSYSTEMMEMORY", "D3DDEVCAPS_TLVERTEXVIDEOMEMORY", "D3DDEVCAPS_TEXTURESYSTEMMEMORY", "D3DDEVCAPS_TEXTUREVIDEOMEMORY", "D3DDEVCAPS_DRAWPRIMTLVERTEX", "D3DDEVCAPS_CANRENDERAFTERFLIP", "D3DDEVCAPS_TEXTURENONLOCALVIDMEM", "D3DDEVCAPS_DRAWPRIMITIVES2", "D3DDEVCAPS_SEPARATETEXTUREMEMORIES", "D3DDEVCAPS_DRAWPRIMITIVES2EX", "D3DDEVCAPS_HWTRANSFORMANDLIGHT", 
"D3DDEVCAPS_CANBLTSYSTONONLOCAL", "D3DDEVCAPS_HWRASTERIZATION", "D3DDEVCAPS_PUREDEVICE", "D3DDEVCAPS_QUINTICRTPATCHES", "D3DDEVCAPS_RTPATCHES", "D3DDEVCAPS_RTPATCHHANDLEZERO", "D3DDEVCAPS_NPATCHES", ]) D3DPMISCCAPS = Flags(DWORD, [ "D3DPMISCCAPS_MASKZ", "D3DPMISCCAPS_CULLNONE", "D3DPMISCCAPS_CULLCW", "D3DPMISCCAPS_CULLCCW", "D3DPMISCCAPS_COLORWRITEENABLE", "D3DPMISCCAPS_CLIPPLANESCALEDPOINTS", "D3DPMISCCAPS_CLIPTLVERTS", "D3DPMISCCAPS_TSSARGTEMP", "D3DPMISCCAPS_BLENDOP", "D3DPMISCCAPS_NULLREFERENCE", "D3DPMISCCAPS_INDEPENDENTWRITEMASKS", "D3DPMISCCAPS_PERSTAGECONSTANT", "D3DPMISCCAPS_FOGANDSPECULARALPHA", "D3DPMISCCAPS_SEPARATEALPHABLEND", "D3DPMISCCAPS_MRTINDEPENDENTBITDEPTHS", "D3DPMISCCAPS_MRTPOSTPIXELSHADERBLENDING", "D3DPMISCCAPS_FOGVERTEXCLAMPED", "D3DPMISCCAPS_POSTBLENDSRGBCONVERT", ]) D3DLINECAPS = Flags(DWORD, [ "D3DLINECAPS_TEXTURE", "D3DLINECAPS_ZTEST", "D3DLINECAPS_BLEND", "D3DLINECAPS_ALPHACMP", "D3DLINECAPS_FOG", "D3DLINECAPS_ANTIALIAS", ]) D3DPRASTERCAPS = Flags(DWORD, [ "D3DPRASTERCAPS_DITHER", "D3DPRASTERCAPS_ZTEST", "D3DPRASTERCAPS_FOGVERTEX", "D3DPRASTERCAPS_FOGTABLE", "D3DPRASTERCAPS_MIPMAPLODBIAS", "D3DPRASTERCAPS_ZBUFFERLESSHSR", "D3DPRASTERCAPS_FOGRANGE", "D3DPRASTERCAPS_ANISOTROPY", "D3DPRASTERCAPS_WBUFFER", "D3DPRASTERCAPS_WFOG", "D3DPRASTERCAPS_ZFOG", "D3DPRASTERCAPS_COLORPERSPECTIVE", "D3DPRASTERCAPS_SCISSORTEST", "D3DPRASTERCAPS_SLOPESCALEDEPTHBIAS", "D3DPRASTERCAPS_DEPTHBIAS", "D3DPRASTERCAPS_MULTISAMPLE_TOGGLE", ]) D3DPCMPCAPS = Flags(DWORD, [ "D3DPCMPCAPS_NEVER", "D3DPCMPCAPS_LESS", "D3DPCMPCAPS_EQUAL", "D3DPCMPCAPS_LESSEQUAL", "D3DPCMPCAPS_GREATER", "D3DPCMPCAPS_NOTEQUAL", "D3DPCMPCAPS_GREATEREQUAL", "D3DPCMPCAPS_ALWAYS", ]) D3DPBLENDCAPS = Flags(DWORD, [ "D3DPBLENDCAPS_ZERO", "D3DPBLENDCAPS_ONE", "D3DPBLENDCAPS_SRCCOLOR", "D3DPBLENDCAPS_INVSRCCOLOR", "D3DPBLENDCAPS_SRCALPHA", "D3DPBLENDCAPS_INVSRCALPHA", "D3DPBLENDCAPS_DESTALPHA", "D3DPBLENDCAPS_INVDESTALPHA", "D3DPBLENDCAPS_DESTCOLOR", "D3DPBLENDCAPS_INVDESTCOLOR", 
"D3DPBLENDCAPS_SRCALPHASAT", "D3DPBLENDCAPS_BOTHSRCALPHA", "D3DPBLENDCAPS_BOTHINVSRCALPHA", "D3DPBLENDCAPS_BLENDFACTOR", "D3DPBLENDCAPS_SRCCOLOR2", "D3DPBLENDCAPS_INVSRCCOLOR2", ]) D3DPSHADECAPS = Flags(DWORD, [ "D3DPSHADECAPS_COLORGOURAUDRGB", "D3DPSHADECAPS_SPECULARGOURAUDRGB", "D3DPSHADECAPS_ALPHAGOURAUDBLEND", "D3DPSHADECAPS_FOGGOURAUD", ]) D3DPTEXTURECAPS = Flags(DWORD, [ "D3DPTEXTURECAPS_PERSPECTIVE", "D3DPTEXTURECAPS_POW2", "D3DPTEXTURECAPS_ALPHA", "D3DPTEXTURECAPS_SQUAREONLY", "D3DPTEXTURECAPS_TEXREPEATNOTSCALEDBYSIZE", "D3DPTEXTURECAPS_ALPHAPALETTE", "D3DPTEXTURECAPS_NONPOW2CONDITIONAL", "D3DPTEXTURECAPS_PROJECTED", "D3DPTEXTURECAPS_CUBEMAP", "D3DPTEXTURECAPS_VOLUMEMAP", "D3DPTEXTURECAPS_MIPMAP", "D3DPTEXTURECAPS_MIPVOLUMEMAP", "D3DPTEXTURECAPS_MIPCUBEMAP", "D3DPTEXTURECAPS_CUBEMAP_POW2", "D3DPTEXTURECAPS_VOLUMEMAP_POW2", "D3DPTEXTURECAPS_NOPROJECTEDBUMPENV", ]) D3DPTFILTERCAPS = Flags(DWORD, [ "D3DPTFILTERCAPS_MINFPOINT", "D3DPTFILTERCAPS_MINFLINEAR", "D3DPTFILTERCAPS_MINFANISOTROPIC", "D3DPTFILTERCAPS_MINFPYRAMIDALQUAD", "D3DPTFILTERCAPS_MINFGAUSSIANQUAD", "D3DPTFILTERCAPS_MIPFPOINT", "D3DPTFILTERCAPS_MIPFLINEAR", "D3DPTFILTERCAPS_CONVOLUTIONMONO", "D3DPTFILTERCAPS_MAGFPOINT", "D3DPTFILTERCAPS_MAGFLINEAR", "D3DPTFILTERCAPS_MAGFANISOTROPIC", "D3DPTFILTERCAPS_MAGFPYRAMIDALQUAD", "D3DPTFILTERCAPS_MAGFGAUSSIANQUAD", ]) D3DPTADDRESSCAPS = Flags(DWORD, [ "D3DPTADDRESSCAPS_WRAP", "D3DPTADDRESSCAPS_MIRROR", "D3DPTADDRESSCAPS_CLAMP", "D3DPTADDRESSCAPS_BORDER", "D3DPTADDRESSCAPS_INDEPENDENTUV", "D3DPTADDRESSCAPS_MIRRORONCE", ]) D3DSTENCILCAPS = Flags(DWORD, [ "D3DSTENCILCAPS_KEEP", "D3DSTENCILCAPS_ZERO", "D3DSTENCILCAPS_REPLACE", "D3DSTENCILCAPS_INCRSAT", "D3DSTENCILCAPS_DECRSAT", "D3DSTENCILCAPS_INVERT", "D3DSTENCILCAPS_INCR", "D3DSTENCILCAPS_DECR", "D3DSTENCILCAPS_TWOSIDED", ]) D3DTEXOPCAPS = Flags(DWORD, [ "D3DTEXOPCAPS_DISABLE", "D3DTEXOPCAPS_SELECTARG1", "D3DTEXOPCAPS_SELECTARG2", "D3DTEXOPCAPS_MODULATE", "D3DTEXOPCAPS_MODULATE2X", "D3DTEXOPCAPS_MODULATE4X", 
"D3DTEXOPCAPS_ADD", "D3DTEXOPCAPS_ADDSIGNED", "D3DTEXOPCAPS_ADDSIGNED2X", "D3DTEXOPCAPS_SUBTRACT", "D3DTEXOPCAPS_ADDSMOOTH", "D3DTEXOPCAPS_BLENDDIFFUSEALPHA", "D3DTEXOPCAPS_BLENDTEXTUREALPHA", "D3DTEXOPCAPS_BLENDFACTORALPHA", "D3DTEXOPCAPS_BLENDTEXTUREALPHAPM", "D3DTEXOPCAPS_BLENDCURRENTALPHA", "D3DTEXOPCAPS_PREMODULATE", "D3DTEXOPCAPS_MODULATEALPHA_ADDCOLOR", "D3DTEXOPCAPS_MODULATECOLOR_ADDALPHA", "D3DTEXOPCAPS_MODULATEINVALPHA_ADDCOLOR", "D3DTEXOPCAPS_MODULATEINVCOLOR_ADDALPHA", "D3DTEXOPCAPS_BUMPENVMAP", "D3DTEXOPCAPS_BUMPENVMAPLUMINANCE", "D3DTEXOPCAPS_DOTPRODUCT3", "D3DTEXOPCAPS_MULTIPLYADD", "D3DTEXOPCAPS_LERP", ]) D3DFVFCAPS = Flags(DWORD, [ "D3DFVFCAPS_TEXCOORDCOUNTMASK", "D3DFVFCAPS_DONOTSTRIPELEMENTS", "D3DFVFCAPS_PSIZE", ]) D3DVTXPCAPS = Flags(DWORD, [ "D3DVTXPCAPS_TEXGEN", "D3DVTXPCAPS_MATERIALSOURCE7", "D3DVTXPCAPS_DIRECTIONALLIGHTS", "D3DVTXPCAPS_POSITIONALLIGHTS", "D3DVTXPCAPS_LOCALVIEWER", "D3DVTXPCAPS_TWEENING", "D3DVTXPCAPS_TEXGEN_SPHEREMAP", "D3DVTXPCAPS_NO_TEXGEN_NONLOCALVIEWER", ]) D3DDEVCAPS2 = Flags(DWORD, [ "D3DDEVCAPS2_STREAMOFFSET", "D3DDEVCAPS2_DMAPNPATCH", "D3DDEVCAPS2_ADAPTIVETESSRTPATCH", "D3DDEVCAPS2_ADAPTIVETESSNPATCH", "D3DDEVCAPS2_CAN_STRETCHRECT_FROM_TEXTURES", "D3DDEVCAPS2_PRESAMPLEDDMAPNPATCH", "D3DDEVCAPS2_VERTEXELEMENTSCANSHARESTREAMOFFSET", ]) D3DDTCAPS = Flags(DWORD, [ "D3DDTCAPS_UBYTE4", "D3DDTCAPS_UBYTE4N", "D3DDTCAPS_SHORT2N", "D3DDTCAPS_SHORT4N", "D3DDTCAPS_USHORT2N", "D3DDTCAPS_USHORT4N", "D3DDTCAPS_UDEC3", "D3DDTCAPS_DEC3N", "D3DDTCAPS_FLOAT16_2", "D3DDTCAPS_FLOAT16_4", ]) #D3DPS_VERSION = Enum("DWORD", [ # "D3DPS_VERSION(0,0)", # "D3DPS_VERSION(1,0)", # "D3DPS_VERSION(1,1)", # "D3DPS_VERSION(1,2)", # "D3DPS_VERSION(1,3)", # "D3DPS_VERSION(1,4)", # "D3DPS_VERSION(2,0)", # "D3DPS_VERSION(3,0)", #]) D3DPS_VERSION = DWORD #D3DVS_VERSION = Enum("DWORD", [ # "D3DVS_VERSION(0,0)", # "D3DVS_VERSION(1,0)", # "D3DVS_VERSION(1,1)", # "D3DVS_VERSION(2,0)", # "D3DVS_VERSION(3,0)", #]) D3DVS_VERSION = DWORD D3DCAPS9 = 
Struct("D3DCAPS9", [ (D3DDEVTYPE, "DeviceType"), (UINT, "AdapterOrdinal"), (D3DCAPS, "Caps"), (D3DCAPS2, "Caps2"), (D3DCAPS3, "Caps3"), (D3DPRESENT_INTERVAL, "PresentationIntervals"), (D3DCURSORCAPS, "CursorCaps"), (D3DDEVCAPS, "DevCaps"), (D3DPMISCCAPS, "PrimitiveMiscCaps"), (D3DPRASTERCAPS, "RasterCaps"), (D3DPCMPCAPS, "ZCmpCaps"), (D3DPBLENDCAPS, "SrcBlendCaps"), (D3DPBLENDCAPS, "DestBlendCaps"), (D3DPCMPCAPS, "AlphaCmpCaps"), (D3DPSHADECAPS, "ShadeCaps"), (D3DPTEXTURECAPS, "TextureCaps"), (D3DPTFILTERCAPS, "TextureFilterCaps"), (D3DPTFILTERCAPS, "CubeTextureFilterCaps"), (D3DPTFILTERCAPS, "VolumeTextureFilterCaps"), (D3DPTADDRESSCAPS, "TextureAddressCaps"), (D3DPTADDRESSCAPS, "VolumeTextureAddressCaps"), (D3DLINECAPS, "LineCaps"), (DWORD, "MaxTextureWidth"), (DWORD, "MaxTextureHeight"), (DWORD, "MaxVolumeExtent"), (DWORD, "MaxTextureRepeat"), (DWORD, "MaxTextureAspectRatio"), (DWORD, "MaxAnisotropy"), (Float, "MaxVertexW"), (Float, "GuardBandLeft"), (Float, "GuardBandTop"), (Float, "GuardBandRight"), (Float, "GuardBandBottom"), (Float, "ExtentsAdjust"), (D3DSTENCILCAPS, "StencilCaps"), (D3DFVFCAPS, "FVFCaps"), (D3DTEXOPCAPS, "TextureOpCaps"), (DWORD, "MaxTextureBlendStages"), (DWORD, "MaxSimultaneousTextures"), (D3DVTXPCAPS, "VertexProcessingCaps"), (DWORD, "MaxActiveLights"), (DWORD, "MaxUserClipPlanes"), (DWORD, "MaxVertexBlendMatrices"), (DWORD, "MaxVertexBlendMatrixIndex"), (Float, "MaxPointSize"), (DWORD, "MaxPrimitiveCount"), (DWORD, "MaxVertexIndex"), (DWORD, "MaxStreams"), (DWORD, "MaxStreamStride"), (D3DVS_VERSION, "VertexShaderVersion"), (DWORD, "MaxVertexShaderConst"), (D3DPS_VERSION, "PixelShaderVersion"), (Float, "PixelShader1xMaxValue"), (D3DDEVCAPS2, "DevCaps2"), (Float, "MaxNpatchTessellationLevel"), (DWORD, "Reserved5"), (UINT, "MasterAdapterOrdinal"), (UINT, "AdapterOrdinalInGroup"), (UINT, "NumberOfAdaptersInGroup"), (D3DDTCAPS, "DeclTypes"), (DWORD, "NumSimultaneousRTs"), (D3DPTFILTERCAPS, "StretchRectFilterCaps"), (D3DVSHADERCAPS2_0, 
"VS20Caps"), (D3DPSHADERCAPS2_0, "PS20Caps"), (D3DPTFILTERCAPS, "VertexTextureFilterCaps"), (DWORD, "MaxVShaderInstructionsExecuted"), (DWORD, "MaxPShaderInstructionsExecuted"), (DWORD, "MaxVertexShader30InstructionSlots"), (DWORD, "MaxPixelShader30InstructionSlots"), ])
########################################################################## # # Copyright 2008-2009 VMware, Inc. # All Rights Reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# ##########################################################################/ """d3d9caps.h""" from winapi import * from d3d9types import * D3DVS20CAPS = Flags(DWORD, [ "D3DVS20CAPS_PREDICATION", ]) D3DVSHADERCAPS2_0 = Struct("D3DVSHADERCAPS2_0", [ (D3DVS20CAPS, "Caps"), (INT, "DynamicFlowControlDepth"), (INT, "NumTemps"), (INT, "StaticFlowControlDepth"), ]) D3DPS20CAPS = Flags(DWORD, [ "D3DPS20CAPS_ARBITRARYSWIZZLE", "D3DPS20CAPS_GRADIENTINSTRUCTIONS", "D3DPS20CAPS_PREDICATION", "D3DPS20CAPS_NODEPENDENTREADLIMIT", "D3DPS20CAPS_NOTEXINSTRUCTIONLIMIT", ]) D3DPSHADERCAPS2_0 = Struct("D3DPSHADERCAPS2_0", [ (D3DPS20CAPS, "Caps"), (INT, "DynamicFlowControlDepth"), (INT, "NumTemps"), (INT, "StaticFlowControlDepth"), (INT, "NumInstructionSlots"), ]) D3DCAPS = Flags(DWORD, [ "D3DCAPS_READ_SCANLINE", ]) D3DCAPS2 = Flags(DWORD, [ "D3DCAPS2_FULLSCREENGAMMA", "D3DCAPS2_CANCALIBRATEGAMMA", "D3DCAPS2_RESERVED", "D3DCAPS2_CANMANAGERESOURCE", "D3DCAPS2_DYNAMICTEXTURES", "D3DCAPS2_CANAUTOGENMIPMAP", "D3DCAPS2_CANSHARERESOURCE", ]) D3DCAPS3 = Flags(DWORD, [ "D3DCAPS3_RESERVED", "D3DCAPS3_ALPHA_FULLSCREEN_FLIP_OR_DISCARD", "D3DCAPS3_LINEAR_TO_SRGB_PRESENTATION", "D3DCAPS3_COPY_TO_VIDMEM", "D3DCAPS3_COPY_TO_SYSTEMMEM", ]) D3DPRESENT_INTERVAL = Flags(DWORD, [ #"D3DPRESENT_INTERVAL_DEFAULT", # 0 "D3DPRESENT_INTERVAL_ONE", "D3DPRESENT_INTERVAL_TWO", "D3DPRESENT_INTERVAL_THREE", "D3DPRESENT_INTERVAL_FOUR", "D3DPRESENT_INTERVAL_IMMEDIATE", ]) D3DCURSORCAPS = Flags(DWORD, [ "D3DCURSORCAPS_COLOR", "D3DCURSORCAPS_LOWRES", ]) D3DDEVCAPS = Flags(DWORD, [ "D3DDEVCAPS_EXECUTESYSTEMMEMORY", "D3DDEVCAPS_EXECUTEVIDEOMEMORY", "D3DDEVCAPS_TLVERTEXSYSTEMMEMORY", "D3DDEVCAPS_TLVERTEXVIDEOMEMORY", "D3DDEVCAPS_TEXTURESYSTEMMEMORY", "D3DDEVCAPS_TEXTUREVIDEOMEMORY", "D3DDEVCAPS_DRAWPRIMTLVERTEX", "D3DDEVCAPS_CANRENDERAFTERFLIP", "D3DDEVCAPS_TEXTURENONLOCALVIDMEM", "D3DDEVCAPS_DRAWPRIMITIVES2", "D3DDEVCAPS_SEPARATETEXTUREMEMORIES", "D3DDEVCAPS_DRAWPRIMITIVES2EX", "D3DDEVCAPS_HWTRANSFORMANDLIGHT", 
"D3DDEVCAPS_CANBLTSYSTONONLOCAL", "D3DDEVCAPS_HWRASTERIZATION", "D3DDEVCAPS_PUREDEVICE", "D3DDEVCAPS_QUINTICRTPATCHES", "D3DDEVCAPS_RTPATCHES", "D3DDEVCAPS_RTPATCHHANDLEZERO", "D3DDEVCAPS_NPATCHES", ]) D3DPMISCCAPS = Flags(DWORD, [ "D3DPMISCCAPS_MASKZ", "D3DPMISCCAPS_CULLNONE", "D3DPMISCCAPS_CULLCW", "D3DPMISCCAPS_CULLCCW", "D3DPMISCCAPS_COLORWRITEENABLE", "D3DPMISCCAPS_CLIPPLANESCALEDPOINTS", "D3DPMISCCAPS_CLIPTLVERTS", "D3DPMISCCAPS_TSSARGTEMP", "D3DPMISCCAPS_BLENDOP", "D3DPMISCCAPS_NULLREFERENCE", "D3DPMISCCAPS_INDEPENDENTWRITEMASKS", "D3DPMISCCAPS_PERSTAGECONSTANT", "D3DPMISCCAPS_FOGANDSPECULARALPHA", "D3DPMISCCAPS_SEPARATEALPHABLEND", "D3DPMISCCAPS_MRTINDEPENDENTBITDEPTHS", "D3DPMISCCAPS_MRTPOSTPIXELSHADERBLENDING", "D3DPMISCCAPS_FOGVERTEXCLAMPED", "D3DPMISCCAPS_POSTBLENDSRGBCONVERT", ]) D3DLINECAPS = Flags(DWORD, [ "D3DLINECAPS_TEXTURE", "D3DLINECAPS_ZTEST", "D3DLINECAPS_BLEND", "D3DLINECAPS_ALPHACMP", "D3DLINECAPS_FOG", "D3DLINECAPS_ANTIALIAS", ]) D3DPRASTERCAPS = Flags(DWORD, [ "D3DPRASTERCAPS_DITHER", "D3DPRASTERCAPS_ZTEST", "D3DPRASTERCAPS_FOGVERTEX", "D3DPRASTERCAPS_FOGTABLE", "D3DPRASTERCAPS_MIPMAPLODBIAS", "D3DPRASTERCAPS_ZBUFFERLESSHSR", "D3DPRASTERCAPS_FOGRANGE", "D3DPRASTERCAPS_ANISOTROPY", "D3DPRASTERCAPS_WBUFFER", "D3DPRASTERCAPS_WFOG", "D3DPRASTERCAPS_ZFOG", "D3DPRASTERCAPS_COLORPERSPECTIVE", "D3DPRASTERCAPS_SCISSORTEST", "D3DPRASTERCAPS_SLOPESCALEDEPTHBIAS", "D3DPRASTERCAPS_DEPTHBIAS", "D3DPRASTERCAPS_MULTISAMPLE_TOGGLE", ]) D3DPCMPCAPS = Flags(DWORD, [ "D3DPCMPCAPS_NEVER", "D3DPCMPCAPS_LESS", "D3DPCMPCAPS_EQUAL", "D3DPCMPCAPS_LESSEQUAL", "D3DPCMPCAPS_GREATER", "D3DPCMPCAPS_NOTEQUAL", "D3DPCMPCAPS_GREATEREQUAL", "D3DPCMPCAPS_ALWAYS", ]) D3DPBLENDCAPS = Flags(DWORD, [ "D3DPBLENDCAPS_ZERO", "D3DPBLENDCAPS_ONE", "D3DPBLENDCAPS_SRCCOLOR", "D3DPBLENDCAPS_INVSRCCOLOR", "D3DPBLENDCAPS_SRCALPHA", "D3DPBLENDCAPS_INVSRCALPHA", "D3DPBLENDCAPS_DESTALPHA", "D3DPBLENDCAPS_INVDESTALPHA", "D3DPBLENDCAPS_DESTCOLOR", "D3DPBLENDCAPS_INVDESTCOLOR", 
"D3DPBLENDCAPS_SRCALPHASAT", "D3DPBLENDCAPS_BOTHSRCALPHA", "D3DPBLENDCAPS_BOTHINVSRCALPHA", "D3DPBLENDCAPS_BLENDFACTOR", "D3DPBLENDCAPS_SRCCOLOR2", "D3DPBLENDCAPS_INVSRCCOLOR2", ]) D3DPSHADECAPS = Flags(DWORD, [ "D3DPSHADECAPS_COLORGOURAUDRGB", "D3DPSHADECAPS_SPECULARGOURAUDRGB", "D3DPSHADECAPS_ALPHAGOURAUDBLEND", "D3DPSHADECAPS_FOGGOURAUD", ]) D3DPTEXTURECAPS = Flags(DWORD, [ "D3DPTEXTURECAPS_PERSPECTIVE", "D3DPTEXTURECAPS_POW2", "D3DPTEXTURECAPS_ALPHA", "D3DPTEXTURECAPS_SQUAREONLY", "D3DPTEXTURECAPS_TEXREPEATNOTSCALEDBYSIZE", "D3DPTEXTURECAPS_ALPHAPALETTE", "D3DPTEXTURECAPS_NONPOW2CONDITIONAL", "D3DPTEXTURECAPS_PROJECTED", "D3DPTEXTURECAPS_CUBEMAP", "D3DPTEXTURECAPS_VOLUMEMAP", "D3DPTEXTURECAPS_MIPMAP", "D3DPTEXTURECAPS_MIPVOLUMEMAP", "D3DPTEXTURECAPS_MIPCUBEMAP", "D3DPTEXTURECAPS_CUBEMAP_POW2", "D3DPTEXTURECAPS_VOLUMEMAP_POW2", "D3DPTEXTURECAPS_NOPROJECTEDBUMPENV", ]) D3DPTFILTERCAPS = Flags(DWORD, [ "D3DPTFILTERCAPS_MINFPOINT", "D3DPTFILTERCAPS_MINFLINEAR", "D3DPTFILTERCAPS_MINFANISOTROPIC", "D3DPTFILTERCAPS_MINFPYRAMIDALQUAD", "D3DPTFILTERCAPS_MINFGAUSSIANQUAD", "D3DPTFILTERCAPS_MIPFPOINT", "D3DPTFILTERCAPS_MIPFLINEAR", "D3DPTFILTERCAPS_CONVOLUTIONMONO", "D3DPTFILTERCAPS_MAGFPOINT", "D3DPTFILTERCAPS_MAGFLINEAR", "D3DPTFILTERCAPS_MAGFANISOTROPIC", "D3DPTFILTERCAPS_MAGFPYRAMIDALQUAD", "D3DPTFILTERCAPS_MAGFGAUSSIANQUAD", ]) D3DPTADDRESSCAPS = Flags(DWORD, [ "D3DPTADDRESSCAPS_WRAP", "D3DPTADDRESSCAPS_MIRROR", "D3DPTADDRESSCAPS_CLAMP", "D3DPTADDRESSCAPS_BORDER", "D3DPTADDRESSCAPS_INDEPENDENTUV", "D3DPTADDRESSCAPS_MIRRORONCE", ]) D3DSTENCILCAPS = Flags(DWORD, [ "D3DSTENCILCAPS_KEEP", "D3DSTENCILCAPS_ZERO", "D3DSTENCILCAPS_REPLACE", "D3DSTENCILCAPS_INCRSAT", "D3DSTENCILCAPS_DECRSAT", "D3DSTENCILCAPS_INVERT", "D3DSTENCILCAPS_INCR", "D3DSTENCILCAPS_DECR", "D3DSTENCILCAPS_TWOSIDED", ]) D3DTEXOPCAPS = Flags(DWORD, [ "D3DTEXOPCAPS_DISABLE", "D3DTEXOPCAPS_SELECTARG1", "D3DTEXOPCAPS_SELECTARG2", "D3DTEXOPCAPS_MODULATE", "D3DTEXOPCAPS_MODULATE2X", "D3DTEXOPCAPS_MODULATE4X", 
"D3DTEXOPCAPS_ADD", "D3DTEXOPCAPS_ADDSIGNED", "D3DTEXOPCAPS_ADDSIGNED2X", "D3DTEXOPCAPS_SUBTRACT", "D3DTEXOPCAPS_ADDSMOOTH", "D3DTEXOPCAPS_BLENDDIFFUSEALPHA", "D3DTEXOPCAPS_BLENDTEXTUREALPHA", "D3DTEXOPCAPS_BLENDFACTORALPHA", "D3DTEXOPCAPS_BLENDTEXTUREALPHAPM", "D3DTEXOPCAPS_BLENDCURRENTALPHA", "D3DTEXOPCAPS_PREMODULATE", "D3DTEXOPCAPS_MODULATEALPHA_ADDCOLOR", "D3DTEXOPCAPS_MODULATECOLOR_ADDALPHA", "D3DTEXOPCAPS_MODULATEINVALPHA_ADDCOLOR", "D3DTEXOPCAPS_MODULATEINVCOLOR_ADDALPHA", "D3DTEXOPCAPS_BUMPENVMAP", "D3DTEXOPCAPS_BUMPENVMAPLUMINANCE", "D3DTEXOPCAPS_DOTPRODUCT3", "D3DTEXOPCAPS_MULTIPLYADD", "D3DTEXOPCAPS_LERP", ]) D3DFVFCAPS = Flags(DWORD, [ "D3DFVFCAPS_TEXCOORDCOUNTMASK", "D3DFVFCAPS_DONOTSTRIPELEMENTS", "D3DFVFCAPS_PSIZE", ]) D3DVTXPCAPS = Flags(DWORD, [ "D3DVTXPCAPS_TEXGEN", "D3DVTXPCAPS_MATERIALSOURCE7", "D3DVTXPCAPS_DIRECTIONALLIGHTS", "D3DVTXPCAPS_POSITIONALLIGHTS", "D3DVTXPCAPS_LOCALVIEWER", "D3DVTXPCAPS_TWEENING", "D3DVTXPCAPS_TEXGEN_SPHEREMAP", "D3DVTXPCAPS_NO_TEXGEN_NONLOCALVIEWER", ]) D3DDEVCAPS2 = Flags(DWORD, [ "D3DDEVCAPS2_STREAMOFFSET", "D3DDEVCAPS2_DMAPNPATCH", "D3DDEVCAPS2_ADAPTIVETESSRTPATCH", "D3DDEVCAPS2_ADAPTIVETESSNPATCH", "D3DDEVCAPS2_CAN_STRETCHRECT_FROM_TEXTURES", "D3DDEVCAPS2_PRESAMPLEDDMAPNPATCH", "D3DDEVCAPS2_VERTEXELEMENTSCANSHARESTREAMOFFSET", ]) D3DDTCAPS = Flags(DWORD, [ "D3DDTCAPS_UBYTE4", "D3DDTCAPS_UBYTE4N", "D3DDTCAPS_SHORT2N", "D3DDTCAPS_SHORT4N", "D3DDTCAPS_USHORT2N", "D3DDTCAPS_USHORT4N", "D3DDTCAPS_UDEC3", "D3DDTCAPS_DEC3N", "D3DDTCAPS_FLOAT16_2", "D3DDTCAPS_FLOAT16_4", ]) #D3DPS_VERSION = Enum("DWORD", [ # "D3DPS_VERSION(0,0)", # "D3DPS_VERSION(1,0)", # "D3DPS_VERSION(1,1)", # "D3DPS_VERSION(1,2)", # "D3DPS_VERSION(1,3)", # "D3DPS_VERSION(1,4)", # "D3DPS_VERSION(2,0)", # "D3DPS_VERSION(3,0)", #]) D3DPS_VERSION = DWORD #D3DVS_VERSION = Enum("DWORD", [ # "D3DVS_VERSION(0,0)", # "D3DVS_VERSION(1,0)", # "D3DVS_VERSION(1,1)", # "D3DVS_VERSION(2,0)", # "D3DVS_VERSION(3,0)", #]) D3DVS_VERSION = DWORD D3DCAPS9 = 
Struct("D3DCAPS9", [ (D3DDEVTYPE, "DeviceType"), (UINT, "AdapterOrdinal"), (D3DCAPS, "Caps"), (D3DCAPS2, "Caps2"), (D3DCAPS3, "Caps3"), (D3DPRESENT_INTERVAL, "PresentationIntervals"), (D3DCURSORCAPS, "CursorCaps"), (D3DDEVCAPS, "DevCaps"), (D3DPMISCCAPS, "PrimitiveMiscCaps"), (D3DPRASTERCAPS, "RasterCaps"), (D3DPCMPCAPS, "ZCmpCaps"), (D3DPBLENDCAPS, "SrcBlendCaps"), (D3DPBLENDCAPS, "DestBlendCaps"), (D3DPCMPCAPS, "AlphaCmpCaps"), (D3DPSHADECAPS, "ShadeCaps"), (D3DPTEXTURECAPS, "TextureCaps"), (D3DPTFILTERCAPS, "TextureFilterCaps"), (D3DPTFILTERCAPS, "CubeTextureFilterCaps"), (D3DPTFILTERCAPS, "VolumeTextureFilterCaps"), (D3DPTADDRESSCAPS, "TextureAddressCaps"), (D3DPTADDRESSCAPS, "VolumeTextureAddressCaps"), (D3DLINECAPS, "LineCaps"), (DWORD, "MaxTextureWidth"), (DWORD, "MaxTextureHeight"), (DWORD, "MaxVolumeExtent"), (DWORD, "MaxTextureRepeat"), (DWORD, "MaxTextureAspectRatio"), (DWORD, "MaxAnisotropy"), (Float, "MaxVertexW"), (Float, "GuardBandLeft"), (Float, "GuardBandTop"), (Float, "GuardBandRight"), (Float, "GuardBandBottom"), (Float, "ExtentsAdjust"), (D3DSTENCILCAPS, "StencilCaps"), (D3DFVFCAPS, "FVFCaps"), (D3DTEXOPCAPS, "TextureOpCaps"), (DWORD, "MaxTextureBlendStages"), (DWORD, "MaxSimultaneousTextures"), (D3DVTXPCAPS, "VertexProcessingCaps"), (DWORD, "MaxActiveLights"), (DWORD, "MaxUserClipPlanes"), (DWORD, "MaxVertexBlendMatrices"), (DWORD, "MaxVertexBlendMatrixIndex"), (Float, "MaxPointSize"), (DWORD, "MaxPrimitiveCount"), (DWORD, "MaxVertexIndex"), (DWORD, "MaxStreams"), (DWORD, "MaxStreamStride"), (D3DVS_VERSION, "VertexShaderVersion"), (DWORD, "MaxVertexShaderConst"), (D3DPS_VERSION, "PixelShaderVersion"), (Float, "PixelShader1xMaxValue"), (D3DDEVCAPS2, "DevCaps2"), (Float, "MaxNpatchTessellationLevel"), (DWORD, "Reserved5"), (UINT, "MasterAdapterOrdinal"), (UINT, "AdapterOrdinalInGroup"), (UINT, "NumberOfAdaptersInGroup"), (D3DDTCAPS, "DeclTypes"), (DWORD, "NumSimultaneousRTs"), (D3DPTFILTERCAPS, "StretchRectFilterCaps"), (D3DVSHADERCAPS2_0, 
"VS20Caps"), (D3DPSHADERCAPS2_0, "PS20Caps"), (D3DPTFILTERCAPS, "VertexTextureFilterCaps"), (DWORD, "MaxVShaderInstructionsExecuted"), (DWORD, "MaxPShaderInstructionsExecuted"), (DWORD, "MaxVertexShader30InstructionSlots"), (DWORD, "MaxPixelShader30InstructionSlots"), ])
en
0.70016
########################################################################## # # Copyright 2008-2009 VMware, Inc. # All Rights Reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ##########################################################################/ d3d9caps.h #"D3DPRESENT_INTERVAL_DEFAULT", # 0 #D3DPS_VERSION = Enum("DWORD", [ # "D3DPS_VERSION(0,0)", # "D3DPS_VERSION(1,0)", # "D3DPS_VERSION(1,1)", # "D3DPS_VERSION(1,2)", # "D3DPS_VERSION(1,3)", # "D3DPS_VERSION(1,4)", # "D3DPS_VERSION(2,0)", # "D3DPS_VERSION(3,0)", #]) #D3DVS_VERSION = Enum("DWORD", [ # "D3DVS_VERSION(0,0)", # "D3DVS_VERSION(1,0)", # "D3DVS_VERSION(1,1)", # "D3DVS_VERSION(2,0)", # "D3DVS_VERSION(3,0)", #])
1.348868
1
miika_nlu/venv/Lib/site-packages/tqdm/_dist_ver.py
NimBuzz01/Project-Miika_SDGP
0
9870
<reponame>NimBuzz01/Project-Miika_SDGP<filename>miika_nlu/venv/Lib/site-packages/tqdm/_dist_ver.py<gh_stars>0 __version__ = '4.64.0'
__version__ = '4.64.0'
none
1
1.085728
1
gym_flock/envs/old/mapping.py
katetolstaya/gym-flock
19
9871
import gym from gym import spaces, error, utils from gym.utils import seeding import numpy as np import configparser from os import path import matplotlib.pyplot as plt from matplotlib.pyplot import gca font = {'family': 'sans-serif', 'weight': 'bold', 'size': 14} class MappingEnv(gym.Env): def __init__(self): # config_file = path.join(path.dirname(__file__), "params_flock.cfg") # config = configparser.ConfigParser() # config.read(config_file) # config = config['flock'] self.nearest_agents = 7 self.nearest_targets = 7 self.mean_pooling = True # normalize the adjacency matrix by the number of neighbors or not self.centralized = True # number states per agent self.nx_system = 4 # number of actions per agent self.nu = 2 # default problem parameters self.n_agents = 100 # int(config['network_size']) # self.comm_radius = 0.9 # float(config['comm_radius']) self.dt = 0.1 # #float(config['system_dt']) self.v_max = 5.0 # float(config['max_vel_init']) self.v_bias = self.v_max # intitialize state matrices self.x = None self.u = None self.mean_vel = None self.init_vel = None self.greedy_action = None self.diff = None self.r2 = None self.adj_mat = None self.adj_mat_mean = None self.diff_targets = None self.r2_targets = None self.target_observed = None self.state_network = None self.state_values = None self.reward = None self.max_accel = 1 # self.action_space = spaces.Box(low=-self.max_accel, high=self.max_accel, shape=(2 * self.n_agents,), # dtype=np.float32) # # self.observation_space = spaces.Box(low=-np.Inf, high=np.Inf, shape=(self.n_agents, ), # dtype=np.float32) # target initialization self.px_max = 100 self.py_max = 100 x = np.linspace(-1.0 * self.px_max, self.px_max, self.n_agents) y = np.linspace(-1.0 * self.py_max, self.py_max, self.n_agents) tx, ty = np.meshgrid(x, y) tx = tx.reshape((-1, 1)) ty = ty.reshape((-1, 1)) self.obs_rad = 2.0 self.obs_rad2 = self.obs_rad * self.obs_rad self.target_x = np.stack((tx, ty), axis=1).reshape((-1, 2)) self.target_unobserved = 
np.ones((self.n_agents * self.n_agents, 2), dtype=np.bool) # rendering initialization self.fig = None self.ax = None self.line1 = None self.line2 = None self.action_scalar = 10.0 self.seed() def reset(self): x = np.zeros((self.n_agents, self.nx_system)) self.target_unobserved = np.ones((self.n_agents * self.n_agents, 2), dtype=np.bool) x[:, 0] = np.random.uniform(low=-self.px_max, high=self.px_max, size=(self.n_agents,)) x[:, 1] = np.random.uniform(low=-self.py_max, high=self.py_max, size=(self.n_agents,)) #bias = np.random.uniform(low=-self.v_bias, high=self.v_bias, size=(2,)) x[:, 2] = np.random.uniform(low=-self.v_max, high=self.v_max, size=(self.n_agents,)) #+ bias[0] x[:, 3] = np.random.uniform(low=-self.v_max, high=self.v_max, size=(self.n_agents,)) #+ bias[1] # keep good initialization self.mean_vel = np.mean(x[:, 2:4], axis=0) self.init_vel = x[:, 2:4] self.x = x # self.a_net = self.get_connectivity(self.x) self.compute_helpers() return self.state_values, self.state_network def params_from_cfg(self, args): # TODO pass # # self.comm_radius = args.getfloat('comm_radius') # # self.comm_radius2 = self.comm_radius * self.comm_radius # # self.vr = 1 / self.comm_radius2 + np.log(self.comm_radius2) # # # # self.n_agents = args.getint('n_agents') # # self.r_max = self.r_max * np.sqrt(self.n_agents) # # # self.action_space = spaces.Box(low=-self.max_accel, high=self.max_accel, shape=(2 * self.n_agents,), # # dtype=np.float32) # # # # self.observation_space = spaces.Box(low=-np.Inf, high=np.Inf, shape=(self.n_agents, self.n_features), # # dtype=np.float32) # # self.v_max = args.getfloat('v_max') # self.v_bias = self.v_max # self.dt = args.getfloat('dt') def seed(self, seed=None): self.np_random, seed = seeding.np_random(seed) return [seed] def step(self, u): # u = np.reshape(u, (-1, 2)) assert u.shape == (self.n_agents, self.nu) u = np.clip(u, a_min=-self.max_accel, a_max=self.max_accel) self.u = u * self.action_scalar old_x = np.copy(self.x) # x position self.x[:, 0] 
= self.x[:, 0] + self.x[:, 2] * self.dt + self.u[:, 0] * self.dt * self.dt * 0.5 # y position self.x[:, 1] = self.x[:, 1] + self.x[:, 3] * self.dt + self.u[:, 1] * self.dt * self.dt * 0.5 # x velocity self.x[:, 2] = self.x[:, 2] + self.u[:, 0] * self.dt # y velocity self.x[:, 3] = self.x[:, 3] + self.u[:, 1] * self.dt # clip velocities self.x[:, 2:4] = np.clip(self.x[:, 2:4], -1.0*self.v_max, self.v_max) dist_traveled = np.sum(np.linalg.norm(self.x[:, 0:2] - old_x[:, 0:2], axis=1)) self.compute_helpers() done = (0 == np.sum(self.target_unobserved)) return (self.state_values, self.state_network), 10.0 * self.reward - dist_traveled, done, {} def compute_helpers(self): # TODO - check this, and initialize stuff in the init(), and try to make more efficient # Neighbors computations self.diff = self.x.reshape((self.n_agents, 1, self.nx_system)) - self.x.reshape( (1, self.n_agents, self.nx_system)) self.r2 = np.multiply(self.diff[:, :, 0], self.diff[:, :, 0]) + np.multiply(self.diff[:, :, 1], self.diff[:, :, 1]) np.fill_diagonal(self.r2, np.Inf) nearest = np.argsort(self.r2, axis=1) obs_neigh = np.zeros((self.n_agents, self.nearest_agents * 4)) self.adj_mat = np.zeros((self.n_agents, self.n_agents)) for i in range(self.nearest_agents): ind2, ind3 = np.meshgrid(nearest[:, i], range(4), indexing='ij') ind1, _ = np.meshgrid(range(self.n_agents), range(4), indexing='ij') obs_neigh[:, i * self.nx_system:(i + 1) * self.nx_system] = np.reshape( self.diff[ind1.flatten(), ind2.flatten(), ind3.flatten()], (-1, 4)) self.adj_mat[:, nearest[:, i]] = 1.0 # Normalize the adjacency matrix by the number of neighbors - results in mean pooling, instead of sum pooling n_neighbors = np.reshape(np.sum(self.adj_mat, axis=1), (self.n_agents, 1)) # correct - checked this n_neighbors[n_neighbors == 0] = 1 self.adj_mat_mean = self.adj_mat / n_neighbors # Targets computations self.diff_targets = self.x[:, 0:2].reshape((self.n_agents, 1, 2)) - self.target_x[ self.target_unobserved].reshape( (1, -1, 
2)) self.r2_targets = np.multiply(self.diff_targets[:, :, 0], self.diff_targets[:, :, 0]) + np.multiply( self.diff_targets[:, :, 1], self.diff_targets[:, :, 1]) nearest_targets = np.argsort(self.r2_targets, axis=1) obs_target = np.zeros((self.n_agents, self.nearest_targets * 2)) for i in range(min(self.nearest_targets, np.shape(nearest_targets)[1])): ind2, ind3 = np.meshgrid(nearest_targets[:, i], range(2), indexing='ij') ind1, _ = np.meshgrid(range(self.n_agents), range(2), indexing='ij') obs_target[:, i * 2:(i + 1) * 2] = np.reshape( self.diff_targets[ind1.flatten(), ind2.flatten(), ind3.flatten()], (-1, 2)) self.target_observed = np.any(self.r2_targets < self.obs_rad2, axis=0).reshape((-1, 1)) self.target_unobserved[self.target_unobserved] = np.tile(np.logical_not(self.target_observed), (1, 2)).flatten() self.reward = np.sum(self.target_observed.astype(np.int)) self.state_values = np.hstack((obs_neigh, obs_target)) self.greedy_action = -1.0 * obs_target[:, 0:2] if self.mean_pooling: self.state_network = self.adj_mat_mean else: self.state_network = self.adj_mat def controller(self): """ The controller for flocking from Turner 2003. 
Returns: the optimal action """ # TODO # return np.zeros((self.n_agents, 2)) return self.greedy_action / 10.0 def render(self, mode='human'): """ Render the environment with agents as points in 2D space """ if self.fig is None: plt.ion() fig = plt.figure() self.ax = fig.add_subplot(111) line1, = self.ax.plot(self.x[:, 0], self.x[:, 1], 'bo') locs = self.target_x[self.target_unobserved].reshape((-1, 2)) line2, = self.ax.plot(locs[:, 0], locs[:, 1], 'rx') plt.ylim(-1.0 * self.py_max, 1.0 * self.py_max) plt.xlim(-1.0 * self.px_max, 1.0 * self.px_max) a = gca() a.set_xticklabels(a.get_xticks(), font) a.set_yticklabels(a.get_yticks(), font) plt.title('GNN Controller') self.fig = fig self.line1 = line1 self.line2 = line2 # TODO render unobserved targets else: self.line1.set_xdata(self.x[:, 0]) self.line1.set_ydata(self.x[:, 1]) locs = self.target_x[self.target_unobserved].reshape((-1,2)) self.line2.set_xdata(locs[:, 0]) self.line2.set_ydata(locs[:, 1]) self.fig.canvas.draw() self.fig.canvas.flush_events() def close(self): pass
import gym from gym import spaces, error, utils from gym.utils import seeding import numpy as np import configparser from os import path import matplotlib.pyplot as plt from matplotlib.pyplot import gca font = {'family': 'sans-serif', 'weight': 'bold', 'size': 14} class MappingEnv(gym.Env): def __init__(self): # config_file = path.join(path.dirname(__file__), "params_flock.cfg") # config = configparser.ConfigParser() # config.read(config_file) # config = config['flock'] self.nearest_agents = 7 self.nearest_targets = 7 self.mean_pooling = True # normalize the adjacency matrix by the number of neighbors or not self.centralized = True # number states per agent self.nx_system = 4 # number of actions per agent self.nu = 2 # default problem parameters self.n_agents = 100 # int(config['network_size']) # self.comm_radius = 0.9 # float(config['comm_radius']) self.dt = 0.1 # #float(config['system_dt']) self.v_max = 5.0 # float(config['max_vel_init']) self.v_bias = self.v_max # intitialize state matrices self.x = None self.u = None self.mean_vel = None self.init_vel = None self.greedy_action = None self.diff = None self.r2 = None self.adj_mat = None self.adj_mat_mean = None self.diff_targets = None self.r2_targets = None self.target_observed = None self.state_network = None self.state_values = None self.reward = None self.max_accel = 1 # self.action_space = spaces.Box(low=-self.max_accel, high=self.max_accel, shape=(2 * self.n_agents,), # dtype=np.float32) # # self.observation_space = spaces.Box(low=-np.Inf, high=np.Inf, shape=(self.n_agents, ), # dtype=np.float32) # target initialization self.px_max = 100 self.py_max = 100 x = np.linspace(-1.0 * self.px_max, self.px_max, self.n_agents) y = np.linspace(-1.0 * self.py_max, self.py_max, self.n_agents) tx, ty = np.meshgrid(x, y) tx = tx.reshape((-1, 1)) ty = ty.reshape((-1, 1)) self.obs_rad = 2.0 self.obs_rad2 = self.obs_rad * self.obs_rad self.target_x = np.stack((tx, ty), axis=1).reshape((-1, 2)) self.target_unobserved = 
np.ones((self.n_agents * self.n_agents, 2), dtype=np.bool) # rendering initialization self.fig = None self.ax = None self.line1 = None self.line2 = None self.action_scalar = 10.0 self.seed() def reset(self): x = np.zeros((self.n_agents, self.nx_system)) self.target_unobserved = np.ones((self.n_agents * self.n_agents, 2), dtype=np.bool) x[:, 0] = np.random.uniform(low=-self.px_max, high=self.px_max, size=(self.n_agents,)) x[:, 1] = np.random.uniform(low=-self.py_max, high=self.py_max, size=(self.n_agents,)) #bias = np.random.uniform(low=-self.v_bias, high=self.v_bias, size=(2,)) x[:, 2] = np.random.uniform(low=-self.v_max, high=self.v_max, size=(self.n_agents,)) #+ bias[0] x[:, 3] = np.random.uniform(low=-self.v_max, high=self.v_max, size=(self.n_agents,)) #+ bias[1] # keep good initialization self.mean_vel = np.mean(x[:, 2:4], axis=0) self.init_vel = x[:, 2:4] self.x = x # self.a_net = self.get_connectivity(self.x) self.compute_helpers() return self.state_values, self.state_network def params_from_cfg(self, args): # TODO pass # # self.comm_radius = args.getfloat('comm_radius') # # self.comm_radius2 = self.comm_radius * self.comm_radius # # self.vr = 1 / self.comm_radius2 + np.log(self.comm_radius2) # # # # self.n_agents = args.getint('n_agents') # # self.r_max = self.r_max * np.sqrt(self.n_agents) # # # self.action_space = spaces.Box(low=-self.max_accel, high=self.max_accel, shape=(2 * self.n_agents,), # # dtype=np.float32) # # # # self.observation_space = spaces.Box(low=-np.Inf, high=np.Inf, shape=(self.n_agents, self.n_features), # # dtype=np.float32) # # self.v_max = args.getfloat('v_max') # self.v_bias = self.v_max # self.dt = args.getfloat('dt') def seed(self, seed=None): self.np_random, seed = seeding.np_random(seed) return [seed] def step(self, u): # u = np.reshape(u, (-1, 2)) assert u.shape == (self.n_agents, self.nu) u = np.clip(u, a_min=-self.max_accel, a_max=self.max_accel) self.u = u * self.action_scalar old_x = np.copy(self.x) # x position self.x[:, 0] 
= self.x[:, 0] + self.x[:, 2] * self.dt + self.u[:, 0] * self.dt * self.dt * 0.5 # y position self.x[:, 1] = self.x[:, 1] + self.x[:, 3] * self.dt + self.u[:, 1] * self.dt * self.dt * 0.5 # x velocity self.x[:, 2] = self.x[:, 2] + self.u[:, 0] * self.dt # y velocity self.x[:, 3] = self.x[:, 3] + self.u[:, 1] * self.dt # clip velocities self.x[:, 2:4] = np.clip(self.x[:, 2:4], -1.0*self.v_max, self.v_max) dist_traveled = np.sum(np.linalg.norm(self.x[:, 0:2] - old_x[:, 0:2], axis=1)) self.compute_helpers() done = (0 == np.sum(self.target_unobserved)) return (self.state_values, self.state_network), 10.0 * self.reward - dist_traveled, done, {} def compute_helpers(self): # TODO - check this, and initialize stuff in the init(), and try to make more efficient # Neighbors computations self.diff = self.x.reshape((self.n_agents, 1, self.nx_system)) - self.x.reshape( (1, self.n_agents, self.nx_system)) self.r2 = np.multiply(self.diff[:, :, 0], self.diff[:, :, 0]) + np.multiply(self.diff[:, :, 1], self.diff[:, :, 1]) np.fill_diagonal(self.r2, np.Inf) nearest = np.argsort(self.r2, axis=1) obs_neigh = np.zeros((self.n_agents, self.nearest_agents * 4)) self.adj_mat = np.zeros((self.n_agents, self.n_agents)) for i in range(self.nearest_agents): ind2, ind3 = np.meshgrid(nearest[:, i], range(4), indexing='ij') ind1, _ = np.meshgrid(range(self.n_agents), range(4), indexing='ij') obs_neigh[:, i * self.nx_system:(i + 1) * self.nx_system] = np.reshape( self.diff[ind1.flatten(), ind2.flatten(), ind3.flatten()], (-1, 4)) self.adj_mat[:, nearest[:, i]] = 1.0 # Normalize the adjacency matrix by the number of neighbors - results in mean pooling, instead of sum pooling n_neighbors = np.reshape(np.sum(self.adj_mat, axis=1), (self.n_agents, 1)) # correct - checked this n_neighbors[n_neighbors == 0] = 1 self.adj_mat_mean = self.adj_mat / n_neighbors # Targets computations self.diff_targets = self.x[:, 0:2].reshape((self.n_agents, 1, 2)) - self.target_x[ self.target_unobserved].reshape( (1, -1, 
2)) self.r2_targets = np.multiply(self.diff_targets[:, :, 0], self.diff_targets[:, :, 0]) + np.multiply( self.diff_targets[:, :, 1], self.diff_targets[:, :, 1]) nearest_targets = np.argsort(self.r2_targets, axis=1) obs_target = np.zeros((self.n_agents, self.nearest_targets * 2)) for i in range(min(self.nearest_targets, np.shape(nearest_targets)[1])): ind2, ind3 = np.meshgrid(nearest_targets[:, i], range(2), indexing='ij') ind1, _ = np.meshgrid(range(self.n_agents), range(2), indexing='ij') obs_target[:, i * 2:(i + 1) * 2] = np.reshape( self.diff_targets[ind1.flatten(), ind2.flatten(), ind3.flatten()], (-1, 2)) self.target_observed = np.any(self.r2_targets < self.obs_rad2, axis=0).reshape((-1, 1)) self.target_unobserved[self.target_unobserved] = np.tile(np.logical_not(self.target_observed), (1, 2)).flatten() self.reward = np.sum(self.target_observed.astype(np.int)) self.state_values = np.hstack((obs_neigh, obs_target)) self.greedy_action = -1.0 * obs_target[:, 0:2] if self.mean_pooling: self.state_network = self.adj_mat_mean else: self.state_network = self.adj_mat def controller(self): """ The controller for flocking from Turner 2003. 
Returns: the optimal action """ # TODO # return np.zeros((self.n_agents, 2)) return self.greedy_action / 10.0 def render(self, mode='human'): """ Render the environment with agents as points in 2D space """ if self.fig is None: plt.ion() fig = plt.figure() self.ax = fig.add_subplot(111) line1, = self.ax.plot(self.x[:, 0], self.x[:, 1], 'bo') locs = self.target_x[self.target_unobserved].reshape((-1, 2)) line2, = self.ax.plot(locs[:, 0], locs[:, 1], 'rx') plt.ylim(-1.0 * self.py_max, 1.0 * self.py_max) plt.xlim(-1.0 * self.px_max, 1.0 * self.px_max) a = gca() a.set_xticklabels(a.get_xticks(), font) a.set_yticklabels(a.get_yticks(), font) plt.title('GNN Controller') self.fig = fig self.line1 = line1 self.line2 = line2 # TODO render unobserved targets else: self.line1.set_xdata(self.x[:, 0]) self.line1.set_ydata(self.x[:, 1]) locs = self.target_x[self.target_unobserved].reshape((-1,2)) self.line2.set_xdata(locs[:, 0]) self.line2.set_ydata(locs[:, 1]) self.fig.canvas.draw() self.fig.canvas.flush_events() def close(self): pass
en
0.39434
# config_file = path.join(path.dirname(__file__), "params_flock.cfg") # config = configparser.ConfigParser() # config.read(config_file) # config = config['flock'] # normalize the adjacency matrix by the number of neighbors or not # number states per agent # number of actions per agent # default problem parameters # int(config['network_size']) # self.comm_radius = 0.9 # float(config['comm_radius']) # #float(config['system_dt']) # float(config['max_vel_init']) # intitialize state matrices # self.action_space = spaces.Box(low=-self.max_accel, high=self.max_accel, shape=(2 * self.n_agents,), # dtype=np.float32) # # self.observation_space = spaces.Box(low=-np.Inf, high=np.Inf, shape=(self.n_agents, ), # dtype=np.float32) # target initialization # rendering initialization #bias = np.random.uniform(low=-self.v_bias, high=self.v_bias, size=(2,)) #+ bias[0] #+ bias[1] # keep good initialization # self.a_net = self.get_connectivity(self.x) # TODO # # self.comm_radius = args.getfloat('comm_radius') # # self.comm_radius2 = self.comm_radius * self.comm_radius # # self.vr = 1 / self.comm_radius2 + np.log(self.comm_radius2) # # # # self.n_agents = args.getint('n_agents') # # self.r_max = self.r_max * np.sqrt(self.n_agents) # # # self.action_space = spaces.Box(low=-self.max_accel, high=self.max_accel, shape=(2 * self.n_agents,), # # dtype=np.float32) # # # # self.observation_space = spaces.Box(low=-np.Inf, high=np.Inf, shape=(self.n_agents, self.n_features), # # dtype=np.float32) # # self.v_max = args.getfloat('v_max') # self.v_bias = self.v_max # self.dt = args.getfloat('dt') # u = np.reshape(u, (-1, 2)) # x position # y position # x velocity # y velocity # clip velocities # TODO - check this, and initialize stuff in the init(), and try to make more efficient # Neighbors computations # Normalize the adjacency matrix by the number of neighbors - results in mean pooling, instead of sum pooling # correct - checked this # Targets computations The controller for flocking from Turner 
2003. Returns: the optimal action # TODO # return np.zeros((self.n_agents, 2)) Render the environment with agents as points in 2D space # TODO render unobserved targets
2.339267
2
tensorflow/python/kernel_tests/lu_op_test.py
PaulWang1905/tensorflow
36
9872
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tensorflow.ops.tf.Lu.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.client import session from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import linalg_ops from tensorflow.python.ops import map_fn from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops from tensorflow.python.ops import variables from tensorflow.python.platform import benchmark from tensorflow.python.platform import test class LuOpTest(test.TestCase): @property def float_types(self): return set((np.float64, np.float32, np.complex64, np.complex128)) def _verifyLuBase(self, x, lower, upper, perm, verification, output_idx_type): lower_np, upper_np, perm_np, verification_np = self.evaluate( [lower, upper, perm, verification]) self.assertAllClose(x, verification_np) self.assertShapeEqual(x, lower) self.assertShapeEqual(x, upper) self.assertAllEqual(x.shape[:-1], perm.shape.as_list()) # Check dtypes are as expected. 
self.assertEqual(x.dtype, lower_np.dtype) self.assertEqual(x.dtype, upper_np.dtype) self.assertEqual(output_idx_type.as_numpy_dtype, perm_np.dtype) # Check that the permutation is valid. if perm_np.shape[-1] > 0: perm_reshaped = np.reshape(perm_np, (-1, perm_np.shape[-1])) for perm_vector in perm_reshaped: self.assertAllClose(np.arange(len(perm_vector)), np.sort(perm_vector)) def _verifyLu(self, x, output_idx_type=dtypes.int64): # Verify that Px = LU. lu, perm = linalg_ops.lu(x, output_idx_type=output_idx_type) # Prepare the lower factor of shape num_rows x num_rows lu_shape = np.array(lu.shape.as_list()) batch_shape = lu_shape[:-2] num_rows = lu_shape[-2] num_cols = lu_shape[-1] lower = array_ops.matrix_band_part(lu, -1, 0) if num_rows > num_cols: eye = linalg_ops.eye( num_rows, batch_shape=batch_shape, dtype=lower.dtype) lower = array_ops.concat([lower, eye[..., num_cols:]], axis=-1) elif num_rows < num_cols: lower = lower[..., :num_rows] # Fill the diagonal with ones. ones_diag = array_ops.ones( np.append(batch_shape, num_rows), dtype=lower.dtype) lower = array_ops.matrix_set_diag(lower, ones_diag) # Prepare the upper factor. upper = array_ops.matrix_band_part(lu, 0, -1) verification = math_ops.matmul(lower, upper) # Permute the rows of product of the Cholesky factors. if num_rows > 0: # Reshape the product of the triangular factors and permutation indices # to a single batch dimension. This makes it easy to apply # invert_permutation and gather_nd ops. perm_reshaped = array_ops.reshape(perm, [-1, num_rows]) verification_reshaped = array_ops.reshape(verification, [-1, num_rows, num_cols]) # Invert the permutation in each batch. inv_perm_reshaped = map_fn.map_fn(array_ops.invert_permutation, perm_reshaped) batch_size = perm_reshaped.shape.as_list()[0] # Prepare the batch indices with the same shape as the permutation. # The corresponding batch index is paired with each of the `num_rows` # permutation indices. 
batch_indices = math_ops.cast( array_ops.broadcast_to( math_ops.range(batch_size)[:, None], perm_reshaped.shape), dtype=output_idx_type) permuted_verification_reshaped = array_ops.gather_nd( verification_reshaped, array_ops.stack([batch_indices, inv_perm_reshaped], axis=-1)) # Reshape the verification matrix back to the original shape. verification = array_ops.reshape(permuted_verification_reshaped, lu_shape) self._verifyLuBase(x, lower, upper, perm, verification, output_idx_type) def testBasic(self): data = np.array([[4., -1., 2.], [-1., 6., 0], [10., 0., 5.]]) for dtype in (np.float32, np.float64): for output_idx_type in (dtypes.int32, dtypes.int64): self._verifyLu(data.astype(dtype), output_idx_type=output_idx_type) for dtype in (np.complex64, np.complex128): for output_idx_type in (dtypes.int32, dtypes.int64): complex_data = np.tril(1j * data, -1).astype(dtype) complex_data += np.triu(-1j * data, 1).astype(dtype) complex_data += data self._verifyLu(complex_data, output_idx_type=output_idx_type) def testPivoting(self): # This matrix triggers partial pivoting because the first diagonal entry # is small. data = np.array([[1e-9, 1., 0.], [1., 0., 0], [0., 1., 5]]) self._verifyLu(data.astype(np.float32)) for dtype in (np.float32, np.float64): self._verifyLu(data.astype(dtype)) _, p = linalg_ops.lu(data) p_val = self.evaluate([p]) # Make sure p_val is not the identity permutation. self.assertNotAllClose(np.arange(3), p_val) for dtype in (np.complex64, np.complex128): complex_data = np.tril(1j * data, -1).astype(dtype) complex_data += np.triu(-1j * data, 1).astype(dtype) complex_data += data self._verifyLu(complex_data) _, p = linalg_ops.lu(data) p_val = self.evaluate([p]) # Make sure p_val is not the identity permutation. self.assertNotAllClose(np.arange(3), p_val) def testInvalidMatrix(self): # LU factorization gives an error when the input is singular. # Note: A singular matrix may return without error but it won't be a valid # factorization. 
for dtype in self.float_types: with self.assertRaises(errors.InvalidArgumentError): self.evaluate( linalg_ops.lu( np.array([[1., 2., 3.], [2., 4., 6.], [2., 3., 4.]], dtype=dtype))) with self.assertRaises(errors.InvalidArgumentError): self.evaluate( linalg_ops.lu( np.array([[[1., 2., 3.], [2., 4., 6.], [1., 2., 3.]], [[1., 2., 3.], [3., 4., 5.], [5., 6., 7.]]], dtype=dtype))) def testBatch(self): simple_array = np.array([[[1., -1.], [2., 5.]]]) # shape (1, 2, 2) self._verifyLu(simple_array) self._verifyLu(np.vstack((simple_array, simple_array))) odd_sized_array = np.array([[[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]]) self._verifyLu(np.vstack((odd_sized_array, odd_sized_array))) batch_size = 200 # Generate random matrices. np.random.seed(42) matrices = np.random.rand(batch_size, 5, 5) self._verifyLu(matrices) # Generate random complex valued matrices. np.random.seed(52) matrices = np.random.rand(batch_size, 5, 5) + 1j * np.random.rand(batch_size, 5, 5) self._verifyLu(matrices) def testLargeMatrix(self): # Generate random matrices. n = 500 np.random.seed(64) data = np.random.rand(n, n) self._verifyLu(data) # Generate random complex valued matrices. 
np.random.seed(129) data = np.random.rand(n, n) + 1j * np.random.rand(n, n) self._verifyLu(data) @test_util.run_v1_only("b/120545219") def testEmpty(self): self._verifyLu(np.empty([0, 2, 2])) self._verifyLu(np.empty([2, 0, 0])) @test_util.run_deprecated_v1 def testConcurrentExecutesWithoutError(self): matrix1 = random_ops.random_normal([5, 5], seed=42) matrix2 = random_ops.random_normal([5, 5], seed=42) lu1, p1 = linalg_ops.lu(matrix1) lu2, p2 = linalg_ops.lu(matrix2) lu1_val, p1_val, lu2_val, p2_val = self.evaluate([lu1, p1, lu2, p2]) self.assertAllEqual(lu1_val, lu2_val) self.assertAllEqual(p1_val, p2_val) class LuBenchmark(test.Benchmark): shapes = [ (4, 4), (10, 10), (16, 16), (101, 101), (256, 256), (1000, 1000), (1024, 1024), (2048, 2048), (4096, 4096), (513, 2, 2), (513, 8, 8), (513, 256, 256), (4, 513, 2, 2), ] def _GenerateMatrix(self, shape): batch_shape = shape[:-2] shape = shape[-2:] assert shape[0] == shape[1] n = shape[0] matrix = np.ones(shape).astype(np.float32) / (2.0 * n) + np.diag( np.ones(n).astype(np.float32)) return np.tile(matrix, batch_shape + (1, 1)) def benchmarkLuOp(self): for shape in self.shapes: with ops.Graph().as_default(), \ session.Session(config=benchmark.benchmark_config()) as sess, \ ops.device("/cpu:0"): matrix = variables.Variable(self._GenerateMatrix(shape)) lu, p = linalg_ops.lu(matrix) variables.global_variables_initializer().run() self.run_op_benchmark( sess, control_flow_ops.group(lu, p), min_iters=25, name="lu_cpu_{shape}".format(shape=shape)) if test.is_gpu_available(True): with ops.Graph().as_default(), \ session.Session(config=benchmark.benchmark_config()) as sess, \ ops.device("/device:GPU:0"): matrix = variables.Variable(self._GenerateMatrix(shape)) lu, p = linalg_ops.lu(matrix) variables.global_variables_initializer().run() self.run_op_benchmark( sess, control_flow_ops.group(lu, p), min_iters=25, name="lu_gpu_{shape}".format(shape=shape)) if __name__ == "__main__": test.main()
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tensorflow.ops.tf.Lu.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.client import session from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import linalg_ops from tensorflow.python.ops import map_fn from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops from tensorflow.python.ops import variables from tensorflow.python.platform import benchmark from tensorflow.python.platform import test class LuOpTest(test.TestCase): @property def float_types(self): return set((np.float64, np.float32, np.complex64, np.complex128)) def _verifyLuBase(self, x, lower, upper, perm, verification, output_idx_type): lower_np, upper_np, perm_np, verification_np = self.evaluate( [lower, upper, perm, verification]) self.assertAllClose(x, verification_np) self.assertShapeEqual(x, lower) self.assertShapeEqual(x, upper) self.assertAllEqual(x.shape[:-1], perm.shape.as_list()) # Check dtypes are as expected. 
self.assertEqual(x.dtype, lower_np.dtype) self.assertEqual(x.dtype, upper_np.dtype) self.assertEqual(output_idx_type.as_numpy_dtype, perm_np.dtype) # Check that the permutation is valid. if perm_np.shape[-1] > 0: perm_reshaped = np.reshape(perm_np, (-1, perm_np.shape[-1])) for perm_vector in perm_reshaped: self.assertAllClose(np.arange(len(perm_vector)), np.sort(perm_vector)) def _verifyLu(self, x, output_idx_type=dtypes.int64): # Verify that Px = LU. lu, perm = linalg_ops.lu(x, output_idx_type=output_idx_type) # Prepare the lower factor of shape num_rows x num_rows lu_shape = np.array(lu.shape.as_list()) batch_shape = lu_shape[:-2] num_rows = lu_shape[-2] num_cols = lu_shape[-1] lower = array_ops.matrix_band_part(lu, -1, 0) if num_rows > num_cols: eye = linalg_ops.eye( num_rows, batch_shape=batch_shape, dtype=lower.dtype) lower = array_ops.concat([lower, eye[..., num_cols:]], axis=-1) elif num_rows < num_cols: lower = lower[..., :num_rows] # Fill the diagonal with ones. ones_diag = array_ops.ones( np.append(batch_shape, num_rows), dtype=lower.dtype) lower = array_ops.matrix_set_diag(lower, ones_diag) # Prepare the upper factor. upper = array_ops.matrix_band_part(lu, 0, -1) verification = math_ops.matmul(lower, upper) # Permute the rows of product of the Cholesky factors. if num_rows > 0: # Reshape the product of the triangular factors and permutation indices # to a single batch dimension. This makes it easy to apply # invert_permutation and gather_nd ops. perm_reshaped = array_ops.reshape(perm, [-1, num_rows]) verification_reshaped = array_ops.reshape(verification, [-1, num_rows, num_cols]) # Invert the permutation in each batch. inv_perm_reshaped = map_fn.map_fn(array_ops.invert_permutation, perm_reshaped) batch_size = perm_reshaped.shape.as_list()[0] # Prepare the batch indices with the same shape as the permutation. # The corresponding batch index is paired with each of the `num_rows` # permutation indices. 
batch_indices = math_ops.cast( array_ops.broadcast_to( math_ops.range(batch_size)[:, None], perm_reshaped.shape), dtype=output_idx_type) permuted_verification_reshaped = array_ops.gather_nd( verification_reshaped, array_ops.stack([batch_indices, inv_perm_reshaped], axis=-1)) # Reshape the verification matrix back to the original shape. verification = array_ops.reshape(permuted_verification_reshaped, lu_shape) self._verifyLuBase(x, lower, upper, perm, verification, output_idx_type) def testBasic(self): data = np.array([[4., -1., 2.], [-1., 6., 0], [10., 0., 5.]]) for dtype in (np.float32, np.float64): for output_idx_type in (dtypes.int32, dtypes.int64): self._verifyLu(data.astype(dtype), output_idx_type=output_idx_type) for dtype in (np.complex64, np.complex128): for output_idx_type in (dtypes.int32, dtypes.int64): complex_data = np.tril(1j * data, -1).astype(dtype) complex_data += np.triu(-1j * data, 1).astype(dtype) complex_data += data self._verifyLu(complex_data, output_idx_type=output_idx_type) def testPivoting(self): # This matrix triggers partial pivoting because the first diagonal entry # is small. data = np.array([[1e-9, 1., 0.], [1., 0., 0], [0., 1., 5]]) self._verifyLu(data.astype(np.float32)) for dtype in (np.float32, np.float64): self._verifyLu(data.astype(dtype)) _, p = linalg_ops.lu(data) p_val = self.evaluate([p]) # Make sure p_val is not the identity permutation. self.assertNotAllClose(np.arange(3), p_val) for dtype in (np.complex64, np.complex128): complex_data = np.tril(1j * data, -1).astype(dtype) complex_data += np.triu(-1j * data, 1).astype(dtype) complex_data += data self._verifyLu(complex_data) _, p = linalg_ops.lu(data) p_val = self.evaluate([p]) # Make sure p_val is not the identity permutation. self.assertNotAllClose(np.arange(3), p_val) def testInvalidMatrix(self): # LU factorization gives an error when the input is singular. # Note: A singular matrix may return without error but it won't be a valid # factorization. 
for dtype in self.float_types: with self.assertRaises(errors.InvalidArgumentError): self.evaluate( linalg_ops.lu( np.array([[1., 2., 3.], [2., 4., 6.], [2., 3., 4.]], dtype=dtype))) with self.assertRaises(errors.InvalidArgumentError): self.evaluate( linalg_ops.lu( np.array([[[1., 2., 3.], [2., 4., 6.], [1., 2., 3.]], [[1., 2., 3.], [3., 4., 5.], [5., 6., 7.]]], dtype=dtype))) def testBatch(self): simple_array = np.array([[[1., -1.], [2., 5.]]]) # shape (1, 2, 2) self._verifyLu(simple_array) self._verifyLu(np.vstack((simple_array, simple_array))) odd_sized_array = np.array([[[4., -1., 2.], [-1., 6., 0], [2., 0., 5.]]]) self._verifyLu(np.vstack((odd_sized_array, odd_sized_array))) batch_size = 200 # Generate random matrices. np.random.seed(42) matrices = np.random.rand(batch_size, 5, 5) self._verifyLu(matrices) # Generate random complex valued matrices. np.random.seed(52) matrices = np.random.rand(batch_size, 5, 5) + 1j * np.random.rand(batch_size, 5, 5) self._verifyLu(matrices) def testLargeMatrix(self): # Generate random matrices. n = 500 np.random.seed(64) data = np.random.rand(n, n) self._verifyLu(data) # Generate random complex valued matrices. 
np.random.seed(129) data = np.random.rand(n, n) + 1j * np.random.rand(n, n) self._verifyLu(data) @test_util.run_v1_only("b/120545219") def testEmpty(self): self._verifyLu(np.empty([0, 2, 2])) self._verifyLu(np.empty([2, 0, 0])) @test_util.run_deprecated_v1 def testConcurrentExecutesWithoutError(self): matrix1 = random_ops.random_normal([5, 5], seed=42) matrix2 = random_ops.random_normal([5, 5], seed=42) lu1, p1 = linalg_ops.lu(matrix1) lu2, p2 = linalg_ops.lu(matrix2) lu1_val, p1_val, lu2_val, p2_val = self.evaluate([lu1, p1, lu2, p2]) self.assertAllEqual(lu1_val, lu2_val) self.assertAllEqual(p1_val, p2_val) class LuBenchmark(test.Benchmark): shapes = [ (4, 4), (10, 10), (16, 16), (101, 101), (256, 256), (1000, 1000), (1024, 1024), (2048, 2048), (4096, 4096), (513, 2, 2), (513, 8, 8), (513, 256, 256), (4, 513, 2, 2), ] def _GenerateMatrix(self, shape): batch_shape = shape[:-2] shape = shape[-2:] assert shape[0] == shape[1] n = shape[0] matrix = np.ones(shape).astype(np.float32) / (2.0 * n) + np.diag( np.ones(n).astype(np.float32)) return np.tile(matrix, batch_shape + (1, 1)) def benchmarkLuOp(self): for shape in self.shapes: with ops.Graph().as_default(), \ session.Session(config=benchmark.benchmark_config()) as sess, \ ops.device("/cpu:0"): matrix = variables.Variable(self._GenerateMatrix(shape)) lu, p = linalg_ops.lu(matrix) variables.global_variables_initializer().run() self.run_op_benchmark( sess, control_flow_ops.group(lu, p), min_iters=25, name="lu_cpu_{shape}".format(shape=shape)) if test.is_gpu_available(True): with ops.Graph().as_default(), \ session.Session(config=benchmark.benchmark_config()) as sess, \ ops.device("/device:GPU:0"): matrix = variables.Variable(self._GenerateMatrix(shape)) lu, p = linalg_ops.lu(matrix) variables.global_variables_initializer().run() self.run_op_benchmark( sess, control_flow_ops.group(lu, p), min_iters=25, name="lu_gpu_{shape}".format(shape=shape)) if __name__ == "__main__": test.main()
en
0.825738
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== Tests for tensorflow.ops.tf.Lu. # Check dtypes are as expected. # Check that the permutation is valid. # Verify that Px = LU. # Prepare the lower factor of shape num_rows x num_rows # Fill the diagonal with ones. # Prepare the upper factor. # Permute the rows of product of the Cholesky factors. # Reshape the product of the triangular factors and permutation indices # to a single batch dimension. This makes it easy to apply # invert_permutation and gather_nd ops. # Invert the permutation in each batch. # Prepare the batch indices with the same shape as the permutation. # The corresponding batch index is paired with each of the `num_rows` # permutation indices. # Reshape the verification matrix back to the original shape. # This matrix triggers partial pivoting because the first diagonal entry # is small. # Make sure p_val is not the identity permutation. # Make sure p_val is not the identity permutation. # LU factorization gives an error when the input is singular. # Note: A singular matrix may return without error but it won't be a valid # factorization. # shape (1, 2, 2) # Generate random matrices. # Generate random complex valued matrices. # Generate random matrices. # Generate random complex valued matrices.
2.027581
2
aligner/features/processing.py
zhouyangnk/Montreal-Forced-Aligner
1
9873
import multiprocessing as mp import subprocess import shutil import os from ..helper import make_path_safe, thirdparty_binary, filter_scp from ..exceptions import CorpusError def mfcc_func(directory, job_name, mfcc_config_path): # pragma: no cover log_directory = os.path.join(directory, 'log') raw_mfcc_path = os.path.join(directory, 'raw_mfcc.{}.ark'.format(job_name)) raw_scp_path = os.path.join(directory, 'feats.{}.scp'.format(job_name)) log_path = os.path.join(log_directory, 'make_mfcc.{}.log'.format(job_name)) segment_path = os.path.join(directory, 'segments.{}'.format(job_name)) scp_path = os.path.join(directory, 'wav.{}.scp'.format(job_name)) with open(log_path, 'w') as f: if os.path.exists(segment_path): seg_proc = subprocess.Popen([thirdparty_binary('extract-segments'), 'scp,p:' + scp_path, segment_path, 'ark:-'], stdout=subprocess.PIPE, stderr=f) comp_proc = subprocess.Popen([thirdparty_binary('compute-mfcc-feats'), '--verbose=2', '--config=' + mfcc_config_path, 'ark:-', 'ark:-'], stdout=subprocess.PIPE, stderr=f, stdin=seg_proc.stdout) else: comp_proc = subprocess.Popen([thirdparty_binary('compute-mfcc-feats'), '--verbose=2', '--config=' + mfcc_config_path, 'scp,p:' + scp_path, 'ark:-'], stdout=subprocess.PIPE, stderr=f) copy_proc = subprocess.Popen([thirdparty_binary('copy-feats'), '--compress=true', 'ark:-', 'ark,scp:{},{}'.format(raw_mfcc_path, raw_scp_path)], stdin=comp_proc.stdout, stderr=f) copy_proc.wait() def init(env): os.environ = env def mfcc(mfcc_directory, num_jobs, feature_config, frequency_configs): """ Multiprocessing function that converts wav files into MFCCs See http://kaldi-asr.org/doc/feat.html and http://kaldi-asr.org/doc/compute-mfcc-feats_8cc.html for more details on how MFCCs are computed. Also see https://github.com/kaldi-asr/kaldi/blob/master/egs/wsj/s5/steps/make_mfcc.sh for the bash script this function was based on. 
Parameters ---------- mfcc_directory : str Directory to save MFCC feature matrices log_directory : str Directory to store log files num_jobs : int The number of processes to use in calculation mfcc_configs : list of :class:`~aligner.config.MfccConfig` Configuration object for generating MFCCs Raises ------ CorpusError If the files per speaker exceeds the number of files that are allowed to be open on the computer (for Unix-based systems) """ child_env = os.environ.copy() os.makedirs(os.path.join(mfcc_directory, 'log'), exist_ok=True) paths = [] for j, p in frequency_configs: paths.append(feature_config.write(mfcc_directory, j, p)) jobs = [(mfcc_directory, x, paths[x]) for x in range(num_jobs)] with mp.Pool(processes=num_jobs, initializer=init, initargs=(child_env,)) as pool: r = False try: results = [pool.apply_async(mfcc_func, args=i) for i in jobs] output = [p.get() for p in results] except OSError as e: print(dir(e)) if e.errno == 24: r = True else: raise if r: raise (CorpusError( 'There were too many files per speaker to process based on your OS settings. 
Please try to split your data into more speakers.')) def apply_cmvn_func(directory, job_name, config): normed_scp_path = os.path.join(directory, config.raw_feature_id + '.{}.scp'.format(job_name)) normed_ark_path = os.path.join(directory, config.raw_feature_id + '.{}.ark'.format(job_name)) with open(os.path.join(directory, 'log', 'norm.{}.log'.format(job_name)), 'w') as logf: utt2spkpath = os.path.join(directory, 'utt2spk.{}'.format(job_name)) cmvnpath = os.path.join(directory, 'cmvn.{}.scp'.format(job_name)) featspath = os.path.join(directory, 'feats.{}.scp'.format(job_name)) if not os.path.exists(normed_scp_path): cmvn_proc = subprocess.Popen([thirdparty_binary('apply-cmvn'), '--utt2spk=ark:' + utt2spkpath, 'scp:' + cmvnpath, 'scp:' + featspath, 'ark,scp:{},{}'.format(normed_ark_path, normed_scp_path)], stderr=logf ) cmvn_proc.communicate() def apply_cmvn(directory, num_jobs, config): child_env = os.environ.copy() jobs = [(directory, x, config) for x in range(num_jobs)] with mp.Pool(processes=num_jobs, initializer=init, initargs=(child_env,)) as pool: results = [pool.apply_async(apply_cmvn_func, args=i) for i in jobs] output = [p.get() for p in results] def add_deltas_func(directory, job_name, config): normed_scp_path = os.path.join(directory, config.raw_feature_id + '.{}.scp'.format(job_name)) ark_path = os.path.join(directory, config.feature_id + '.{}.ark'.format(job_name)) scp_path = os.path.join(directory, config.feature_id + '.{}.scp'.format(job_name)) with open(os.path.join(directory, 'log', 'add_deltas.{}.log'.format(job_name)), 'w') as logf: if config.fmllr_path is not None and os.path.exists(config.fmllr_path): deltas_proc = subprocess.Popen([thirdparty_binary('add-deltas'), 'scp:' + normed_scp_path, 'ark:-'], stderr=logf, stdout=subprocess.PIPE) trans_proc = subprocess.Popen([thirdparty_binary('transform-feats'), 'ark:' + config.fmllr_path, 'ark:-', 'ark,scp:{},{}'.format(ark_path, scp_path)], stdin=deltas_proc.stdout, stderr=logf) 
trans_proc.communicate() else: deltas_proc = subprocess.Popen([thirdparty_binary('add-deltas'), 'scp:' + normed_scp_path, 'ark,scp:{},{}'.format(ark_path, scp_path)], stderr=logf) deltas_proc.communicate() def add_deltas(directory, num_jobs, config): child_env = os.environ.copy() jobs = [(directory, x, config) for x in range(num_jobs)] with mp.Pool(processes=num_jobs, initializer=init, initargs=(child_env,)) as pool: results = [pool.apply_async(add_deltas_func, args=i) for i in jobs] output = [p.get() for p in results] def apply_lda_func(directory, job_name, config): normed_scp_path = os.path.join(directory, config.raw_feature_id + '.{}.scp'.format(job_name)) ark_path = os.path.join(directory, config.feature_id + '.{}.ark'.format(job_name)) scp_path = os.path.join(directory, config.feature_id + '.{}.scp'.format(job_name)) ivector_scp_path = os.path.join(directory, 'ivector.{}.scp'.format(job_name)) with open(os.path.join(directory, 'log', 'lda.{}.log'.format(job_name)), 'a') as logf: if os.path.exists(config.lda_path): splice_feats_proc = subprocess.Popen([thirdparty_binary('splice-feats'), '--left-context={}'.format(config.splice_left_context), '--right-context={}'.format(config.splice_right_context), 'scp:' + normed_scp_path, 'ark:-'], stdout=subprocess.PIPE, stderr=logf) if config.ivectors and os.path.exists(ivector_scp_path): transform_feats_proc = subprocess.Popen([thirdparty_binary("transform-feats"), config.lda_path, 'ark:-', 'ark:-'], stdin=splice_feats_proc.stdout, stdout=subprocess.PIPE, stderr=logf) paste_proc = subprocess.Popen([thirdparty_binary('paste-feats'), 'ark:-', 'scp:' + ivector_scp_path, 'ark,scp:{},{}'.format(ark_path, scp_path)], stdin=transform_feats_proc.stdout, stderr=logf) paste_proc.communicate() else: transform_feats_proc = subprocess.Popen([thirdparty_binary("transform-feats"), config.lda_path, 'ark:-', 'ark,scp:{},{}'.format(ark_path, scp_path)], stdin=splice_feats_proc.stdout, stderr=logf) transform_feats_proc.communicate() else: 
logf.write('could not find "{}"\n'.format(config.lda_path)) splice_feats_proc = subprocess.Popen([thirdparty_binary('splice-feats'), '--left-context={}'.format(config.splice_left_context), '--right-context={}'.format(config.splice_right_context), 'scp:' + normed_scp_path, 'ark,scp:{},{}'.format(ark_path, scp_path)], stderr=logf) splice_feats_proc.communicate() def apply_lda(directory, num_jobs, config): jobs = [(directory, x, config) for x in range(num_jobs)] with mp.Pool(processes=num_jobs, initializer=init, initargs=(os.environ.copy(),)) as pool: results = [pool.apply_async(apply_lda_func, args=i) for i in jobs] output = [p.get() for p in results]
import multiprocessing as mp import subprocess import shutil import os from ..helper import make_path_safe, thirdparty_binary, filter_scp from ..exceptions import CorpusError def mfcc_func(directory, job_name, mfcc_config_path): # pragma: no cover log_directory = os.path.join(directory, 'log') raw_mfcc_path = os.path.join(directory, 'raw_mfcc.{}.ark'.format(job_name)) raw_scp_path = os.path.join(directory, 'feats.{}.scp'.format(job_name)) log_path = os.path.join(log_directory, 'make_mfcc.{}.log'.format(job_name)) segment_path = os.path.join(directory, 'segments.{}'.format(job_name)) scp_path = os.path.join(directory, 'wav.{}.scp'.format(job_name)) with open(log_path, 'w') as f: if os.path.exists(segment_path): seg_proc = subprocess.Popen([thirdparty_binary('extract-segments'), 'scp,p:' + scp_path, segment_path, 'ark:-'], stdout=subprocess.PIPE, stderr=f) comp_proc = subprocess.Popen([thirdparty_binary('compute-mfcc-feats'), '--verbose=2', '--config=' + mfcc_config_path, 'ark:-', 'ark:-'], stdout=subprocess.PIPE, stderr=f, stdin=seg_proc.stdout) else: comp_proc = subprocess.Popen([thirdparty_binary('compute-mfcc-feats'), '--verbose=2', '--config=' + mfcc_config_path, 'scp,p:' + scp_path, 'ark:-'], stdout=subprocess.PIPE, stderr=f) copy_proc = subprocess.Popen([thirdparty_binary('copy-feats'), '--compress=true', 'ark:-', 'ark,scp:{},{}'.format(raw_mfcc_path, raw_scp_path)], stdin=comp_proc.stdout, stderr=f) copy_proc.wait() def init(env): os.environ = env def mfcc(mfcc_directory, num_jobs, feature_config, frequency_configs): """ Multiprocessing function that converts wav files into MFCCs See http://kaldi-asr.org/doc/feat.html and http://kaldi-asr.org/doc/compute-mfcc-feats_8cc.html for more details on how MFCCs are computed. Also see https://github.com/kaldi-asr/kaldi/blob/master/egs/wsj/s5/steps/make_mfcc.sh for the bash script this function was based on. 
Parameters ---------- mfcc_directory : str Directory to save MFCC feature matrices log_directory : str Directory to store log files num_jobs : int The number of processes to use in calculation mfcc_configs : list of :class:`~aligner.config.MfccConfig` Configuration object for generating MFCCs Raises ------ CorpusError If the files per speaker exceeds the number of files that are allowed to be open on the computer (for Unix-based systems) """ child_env = os.environ.copy() os.makedirs(os.path.join(mfcc_directory, 'log'), exist_ok=True) paths = [] for j, p in frequency_configs: paths.append(feature_config.write(mfcc_directory, j, p)) jobs = [(mfcc_directory, x, paths[x]) for x in range(num_jobs)] with mp.Pool(processes=num_jobs, initializer=init, initargs=(child_env,)) as pool: r = False try: results = [pool.apply_async(mfcc_func, args=i) for i in jobs] output = [p.get() for p in results] except OSError as e: print(dir(e)) if e.errno == 24: r = True else: raise if r: raise (CorpusError( 'There were too many files per speaker to process based on your OS settings. 
Please try to split your data into more speakers.')) def apply_cmvn_func(directory, job_name, config): normed_scp_path = os.path.join(directory, config.raw_feature_id + '.{}.scp'.format(job_name)) normed_ark_path = os.path.join(directory, config.raw_feature_id + '.{}.ark'.format(job_name)) with open(os.path.join(directory, 'log', 'norm.{}.log'.format(job_name)), 'w') as logf: utt2spkpath = os.path.join(directory, 'utt2spk.{}'.format(job_name)) cmvnpath = os.path.join(directory, 'cmvn.{}.scp'.format(job_name)) featspath = os.path.join(directory, 'feats.{}.scp'.format(job_name)) if not os.path.exists(normed_scp_path): cmvn_proc = subprocess.Popen([thirdparty_binary('apply-cmvn'), '--utt2spk=ark:' + utt2spkpath, 'scp:' + cmvnpath, 'scp:' + featspath, 'ark,scp:{},{}'.format(normed_ark_path, normed_scp_path)], stderr=logf ) cmvn_proc.communicate() def apply_cmvn(directory, num_jobs, config): child_env = os.environ.copy() jobs = [(directory, x, config) for x in range(num_jobs)] with mp.Pool(processes=num_jobs, initializer=init, initargs=(child_env,)) as pool: results = [pool.apply_async(apply_cmvn_func, args=i) for i in jobs] output = [p.get() for p in results] def add_deltas_func(directory, job_name, config): normed_scp_path = os.path.join(directory, config.raw_feature_id + '.{}.scp'.format(job_name)) ark_path = os.path.join(directory, config.feature_id + '.{}.ark'.format(job_name)) scp_path = os.path.join(directory, config.feature_id + '.{}.scp'.format(job_name)) with open(os.path.join(directory, 'log', 'add_deltas.{}.log'.format(job_name)), 'w') as logf: if config.fmllr_path is not None and os.path.exists(config.fmllr_path): deltas_proc = subprocess.Popen([thirdparty_binary('add-deltas'), 'scp:' + normed_scp_path, 'ark:-'], stderr=logf, stdout=subprocess.PIPE) trans_proc = subprocess.Popen([thirdparty_binary('transform-feats'), 'ark:' + config.fmllr_path, 'ark:-', 'ark,scp:{},{}'.format(ark_path, scp_path)], stdin=deltas_proc.stdout, stderr=logf) 
trans_proc.communicate() else: deltas_proc = subprocess.Popen([thirdparty_binary('add-deltas'), 'scp:' + normed_scp_path, 'ark,scp:{},{}'.format(ark_path, scp_path)], stderr=logf) deltas_proc.communicate() def add_deltas(directory, num_jobs, config): child_env = os.environ.copy() jobs = [(directory, x, config) for x in range(num_jobs)] with mp.Pool(processes=num_jobs, initializer=init, initargs=(child_env,)) as pool: results = [pool.apply_async(add_deltas_func, args=i) for i in jobs] output = [p.get() for p in results] def apply_lda_func(directory, job_name, config): normed_scp_path = os.path.join(directory, config.raw_feature_id + '.{}.scp'.format(job_name)) ark_path = os.path.join(directory, config.feature_id + '.{}.ark'.format(job_name)) scp_path = os.path.join(directory, config.feature_id + '.{}.scp'.format(job_name)) ivector_scp_path = os.path.join(directory, 'ivector.{}.scp'.format(job_name)) with open(os.path.join(directory, 'log', 'lda.{}.log'.format(job_name)), 'a') as logf: if os.path.exists(config.lda_path): splice_feats_proc = subprocess.Popen([thirdparty_binary('splice-feats'), '--left-context={}'.format(config.splice_left_context), '--right-context={}'.format(config.splice_right_context), 'scp:' + normed_scp_path, 'ark:-'], stdout=subprocess.PIPE, stderr=logf) if config.ivectors and os.path.exists(ivector_scp_path): transform_feats_proc = subprocess.Popen([thirdparty_binary("transform-feats"), config.lda_path, 'ark:-', 'ark:-'], stdin=splice_feats_proc.stdout, stdout=subprocess.PIPE, stderr=logf) paste_proc = subprocess.Popen([thirdparty_binary('paste-feats'), 'ark:-', 'scp:' + ivector_scp_path, 'ark,scp:{},{}'.format(ark_path, scp_path)], stdin=transform_feats_proc.stdout, stderr=logf) paste_proc.communicate() else: transform_feats_proc = subprocess.Popen([thirdparty_binary("transform-feats"), config.lda_path, 'ark:-', 'ark,scp:{},{}'.format(ark_path, scp_path)], stdin=splice_feats_proc.stdout, stderr=logf) transform_feats_proc.communicate() else: 
logf.write('could not find "{}"\n'.format(config.lda_path)) splice_feats_proc = subprocess.Popen([thirdparty_binary('splice-feats'), '--left-context={}'.format(config.splice_left_context), '--right-context={}'.format(config.splice_right_context), 'scp:' + normed_scp_path, 'ark,scp:{},{}'.format(ark_path, scp_path)], stderr=logf) splice_feats_proc.communicate() def apply_lda(directory, num_jobs, config): jobs = [(directory, x, config) for x in range(num_jobs)] with mp.Pool(processes=num_jobs, initializer=init, initargs=(os.environ.copy(),)) as pool: results = [pool.apply_async(apply_lda_func, args=i) for i in jobs] output = [p.get() for p in results]
en
0.695262
# pragma: no cover Multiprocessing function that converts wav files into MFCCs See http://kaldi-asr.org/doc/feat.html and http://kaldi-asr.org/doc/compute-mfcc-feats_8cc.html for more details on how MFCCs are computed. Also see https://github.com/kaldi-asr/kaldi/blob/master/egs/wsj/s5/steps/make_mfcc.sh for the bash script this function was based on. Parameters ---------- mfcc_directory : str Directory to save MFCC feature matrices log_directory : str Directory to store log files num_jobs : int The number of processes to use in calculation mfcc_configs : list of :class:`~aligner.config.MfccConfig` Configuration object for generating MFCCs Raises ------ CorpusError If the files per speaker exceeds the number of files that are allowed to be open on the computer (for Unix-based systems)
2.135553
2
ffai/util/bothelper.py
tysen2k/ffai
0
9874
""" A number of static methods for interpretting the state of the fantasy football pitch that aren't required directly by the client """ from ffai.core import Game, Action, ActionType from ffai.core.procedure import * from ffai.util.pathfinding import * from typing import Optional, List, Dict class ActionSequence: def __init__(self, action_steps: List[Action], score: float = 0, description: str = ''): """ Creates a new ActionSequence - an ordered list of sequential Actions to attempt to undertake. :param action_steps: Sequence of action steps that form this action. :param score: A score representing the attractiveness of the move (default: 0) :param description: A debug string (default: '') """ # Note the intention of this object is that when the object is acting, as steps are completed, # they are removed from the move_sequence so the next move is always the top of the move_sequence # lis self.action_steps = action_steps self.score = score self.description = description def is_valid(self, game: Game) -> bool: pass def popleft(self): return self.action_steps.pop(0) #val = self.action_steps[0] #del self.action_steps[0] #return val def is_empty(self): return not self.action_steps class FfHeatMap: """ A heat map of a Blood Bowl field. 
A class for analysing zones of control for both teams """ def __init__(self, game: Game, team: Team): self.game=game self.team = team # Note that the edges are not on the field, but represent crowd squares self.units_friendly: List[List[float]] = [[0.0 for y in range(game.state.pitch.height)] for x in range(game.state.pitch.width)] self.units_opponent: List[List[float]] = [[0.0 for y in range(game.state.pitch.height)] for x in range(game.state.pitch.width)] def add_unit_paths(self, player:Player, paths: List[Path]): is_friendly: bool = player.team == self.team for path in paths: if is_friendly: self.units_friendly[path.steps[-1].x][path.steps[-1].y] += (1.0 - path.cost)*(1.0 - path.cost) else: self.units_opponent[path.steps[-1].x][path.steps[-1].y] += (1.0 - path.cost)*(1.0 - path.cost) def add_unit_by_paths(self, game: Game, paths: Dict[Player, List[Path]]): for player in paths.keys(): self.add_unit_paths(player, paths[player]) def add_players_moved(self, game: Game, players: List[Player]): for player in players: adjacents: List[Square] = game.get_adjacent_squares(player.position, occupied=True) self.units_friendly[player.position.x][player.position.y] += 1.0 for adjacent in adjacents: self.units_friendly[player.position.x][player.position.y] += 0.5 def get_ball_move_square_safety_score(self, square: Square) -> float: # Basic idea - identify safe regions to move the ball towards friendly_heat: float = self.units_friendly[square.x][square.y] opponent_heat: float = self.units_opponent[square.x][square.y] score: float = 30.0 * max(0.0, (1.0 - opponent_heat/2)) return score #score: float=0.0 #if opponent_heat < 0.25: score += 15.0 #if opponent_heat < 0.05: score += 15.0 #if opponent_heat < 1.5: score += 5 #if friendly_heat > 3.5: score += 10.0 #score += max(30.0, 5.0*(friendly_heat-opponent_heat)) return score def get_cage_necessity_score(self, square: Square) -> float: opponent_friendly: float = self.units_friendly[square.x][square.y] opponent_heat: float = 
self.units_opponent[square.x][square.y] score: float = 0.0 if opponent_heat < 0.4: score -= 80.0 # if opponent_friendly > opponent_heat: score -= max(30.0, 10.0*(opponent_friendly-opponent_heat)) # if opponent_heat <1.5: score -=5 # if opponent_heat > opponent_friendly: score += 10.0*(opponent_friendly-opponent_heat) return score def blitz_used(game: Game) -> bool: for action in game.state.available_actions: if action.action_type == ActionType.START_BLITZ: return False return True def handoff_used(game: Game) -> bool: for action in game.state.available_actions: if action.action_type == ActionType.START_HANDOFF: return False return True def foul_used(game: Game) -> bool: for action in game.state.available_actions: if action.action_type == ActionType.START_FOUL: return False return True def pass_used(game: Game) -> bool: for action in game.state.available_actions: if action.action_type == ActionType.START_PASS: return False return True def get_players(game: Game, team: Team, include_own: bool = True, include_opp: bool = True, include_stunned: bool = True, include_used: bool = True, include_off_pitch: bool = False, only_blockable: bool = False, only_used: bool = False) -> List[Player]: players: List[Player] = [] selected_players: List[Player] = [] for iteam in game.state.teams: if iteam == team and include_own: players.extend(iteam.players) if iteam != team and include_opp: players.extend(iteam.players) for player in players: if only_blockable and not player.state.up: continue if only_used and not player.state.used: continue if include_stunned or not player.state.stunned: if include_used or not player.state.used: if include_off_pitch or (player.position is not None and not game.is_out_of_bounds(player.position)): selected_players.append(player) return selected_players def caging_squares_north_east(game: Game, protect_square: Square) -> List[Square]: # * At it's simplest, a cage requires 4 platers in the North-East, South-East, South-West and North-West # * positions, 
relative to the ball carrier, such that there is no more than 3 squares between the players in # * each of those adjacent compass directions. # * # * 1 3 # * xx-xx # * xx-xx # * --o-- # * xx-xx # * xx-xx # * 3 4 # * # * pitch is 26 long # * # * # * Basically we need one player in each of the corners: 1-4, but spaced such that there is no gap of 3 squares. # * If the caging player is in 1-4, but next to ball carrier, he ensures this will automatically be me # * # * The only exception to this is when the ball carrier is on, or near, the sideline. Then return the squares # * that can otherwise form the cage. # * caging_squares: List[Square] = [] x = protect_square.x y = protect_square.y if x <= game.state.pitch.width - 3: if y == game.state.pitch.height-2: caging_squares.append(game.get_square(x + 1, y + 1)) caging_squares.append(game.get_square(x + 2, y + 1)) caging_squares.append(game.get_square(x + 1, y)) caging_squares.append(game.get_square(x + 2, y)) elif y == game.state.pitch.height-1: caging_squares.append(game.get_square(x + 1, y)) caging_squares.append(game.get_square(x + 2, y)) else: caging_squares.append(game.get_square(x + 1, y + 1)) caging_squares.append(game.get_square(x + 1, y + 2)) caging_squares.append(game.get_square(x + 2, y + 1)) # caging_squares.append(game.state.pitch.get_square(x + 3, y + 3)) return caging_squares def caging_squares_north_west(game: Game, protect_square: Square) -> List[Square]: caging_squares: List[Square] = [] x = protect_square.x y = protect_square.y if x >= 3: if y == game.state.pitch.height-2: caging_squares.append(game.get_square(x - 1, y + 1)) caging_squares.append(game.get_square(x - 2, y + 1)) caging_squares.append(game.get_square(x - 1, y)) caging_squares.append(game.get_square(x - 2, y)) elif y == game.state.pitch.height-1: caging_squares.append(game.get_square(x - 1, y)) caging_squares.append(game.get_square(x - 2, y)) else: caging_squares.append(game.get_square(x - 1, y + 1)) caging_squares.append(game.get_square(x 
- 1, y + 2)) caging_squares.append(game.get_square(x - 2, y + 1)) # caging_squares.append(game.state.pitch.get_square(x - 3, y + 3)) return caging_squares def caging_squares_south_west(game: Game, protect_square: Square) -> List[Square]: caging_squares: List[Square] = [] x = protect_square.x y = protect_square.y if x >= 3: if y == 2: caging_squares.append(game.get_square(x - 1, y - 1)) caging_squares.append(game.get_square(x - 2, y - 1)) caging_squares.append(game.get_square(x - 1, y)) caging_squares.append(game.get_square(x - 2, y)) elif y == 1: caging_squares.append(game.get_square(x - 1, y)) caging_squares.append(game.get_square(x - 2, y)) else: caging_squares.append(game.get_square(x - 1, y - 1)) caging_squares.append(game.get_square(x - 1, y - 2)) caging_squares.append(game.get_square(x - 2, y - 1)) # caging_squares.append(game.state.pitch.get_square(x - 3, y - 3)) return caging_squares def caging_squares_south_east(game: Game, protect_square: Square) -> List[Square]: caging_squares: List[Square] = [] x = protect_square.x y = protect_square.y if x <= game.state.pitch.width-3: if y == 2: caging_squares.append(game.get_square(x + 1, y - 1)) caging_squares.append(game.get_square(x + 2, y - 1)) caging_squares.append(game.get_square(x + 1, y)) caging_squares.append(game.get_square(x + 2, y)) elif y == 1: caging_squares.append(game.get_square(x + 1, y)) caging_squares.append(game.get_square(x + 2, y)) else: caging_squares.append(game.get_square(x + 1, y - 1)) caging_squares.append(game.get_square(x + 1, y - 2)) caging_squares.append(game.get_square(x + 2, y - 1)) # caging_squares.append(game.get_square(x + 3, y - 3)) return caging_squares def is_caging_position(game: Game, player: Player, protect_player: Player) -> bool: return player.position.distance(protect_player.position) <= 2 and not is_castle_position_of(game, player, protect_player) def has_player_within_n_squares(game: Game, units: List[Player], square: Square, num_squares: int) -> bool: for cur in units: 
if cur.position.distance(square) <= num_squares: return True return False def has_adjacent_player(game: Game, square: Square) -> bool: return not game.get_adjacent_players(square) def is_castle_position_of(game: Game, player1: Player, player2: Player) -> bool: return player1.position.x == player2.position.x or player1.position.y == player2.position.y def is_bishop_position_of(game: Game, player1: Player, player2: Player) -> bool: return abs(player1.position.x - player2.position.x) == abs(player1.position.y - player2.position.y) def attacker_would_surf(game: Game, attacker: Player, defender: Player) -> bool: if (defender.has_skill(Skill.SIDE_STEP) and not attacker.has_skill(Skill.GRAB)) or defender.has_skill(Skill.STAND_FIRM): return False if not attacker.position.is_adjacent(defender.position): return False return direct_surf_squares(game, attacker.position, defender.position) def direct_surf_squares(game: Game, attack_square: Square, defend_square: Square) -> bool: defender_on_sideline: bool = on_sideline(game, defend_square) defender_in_endzone: bool = on_endzone(game, defend_square) if defender_on_sideline and defend_square.x == attack_square.x: return True if defender_in_endzone and defend_square.y == attack_square.y: return True if defender_in_endzone and defender_on_sideline: return True return False def reverse_x_for_right(game: Game, team: Team, x: int) -> int: if not game.is_team_side(Square(13, 3), team): res = game.state.pitch.width - 1 - x else: res = x return res def reverse_x_for_left(game: Game, team: Team, x: int) -> int: if game.is_team_side(Square(13, 3), team): res = game.state.pitch.width - 1 - x else: res = x return res def on_sideline(game: Game, square: Square) -> bool: return square.y == 1 or square.y == game.state.pitch.height - 1 def on_endzone(game: Game, square: Square) -> bool: return square.x == 1 or square.x == game.state.pitch.width - 1 def on_los(game: Game, team: Team, square: Square) -> bool: return (reverse_x_for_right(game, 
team, square.x) == 13) and 4 < square.y < 21 def los_squares(game: Game, team: Team) -> List[Square]: squares: List[Square] = [ game.get_square(reverse_x_for_right(game, team, 13), 5), game.get_square(reverse_x_for_right(game, team, 13), 6), game.get_square(reverse_x_for_right(game, team, 13), 7), game.get_square(reverse_x_for_right(game, team, 13), 8), game.get_square(reverse_x_for_right(game, team, 13), 9), game.get_square(reverse_x_for_right(game, team, 13), 10), game.get_square(reverse_x_for_right(game, team, 13), 11) ] return squares def distance_to_sideline(game: Game, square: Square) -> int: return min(square.y - 1, game.state.pitch.height - square.y - 2) def is_endzone(game, square: Square) -> bool: return square.x == 1 or square.x == game.state.pitch.width - 1 def last_block_proc(game) -> Optional[Block]: for i in range(len(game.state.stack.items) - 1, -1, -1): if isinstance(game.state.stack.items[i], Block): block_proc = game.state.stack.items[i] return block_proc return None def is_adjacent_ball(game: Game, square: Square) -> bool: ball_square = game.get_ball_position() return ball_square is not None and ball_square.is_adjacent(square) def squares_within(game: Game, square: Square, distance: int) -> List[Square]: squares: List[Square] = [] for i in range(-distance, distance+1): for j in range(-distance, distance+1): cur_square = game.get_square(square.x+i, square.y+j) if cur_square != square and not game.is_out_of_bounds(cur_square): squares.append(cur_square) return squares def distance_to_defending_endzone(game: Game, team: Team, position: Square) -> int: res = reverse_x_for_right(game, team, position.x) - 1 return res def distance_to_scoring_endzone(game: Game, team: Team, position: Square) -> int: res = reverse_x_for_left(game, team, position.x) - 1 return res #return game.state.pitch.width - 1 - reverse_x_for_right(game, team, position.x) def players_in_scoring_endzone(game: Game, team: Team, include_own: bool = True, include_opp: bool = False) -> 
List[Player]: players: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp) selected_players: List[Player] = [] for player in players: if in_scoring_endzone(game, team, player.position): selected_players.append(player) return selected_players def in_scoring_endzone(game: Game, team: Team, square: Square) -> bool: return reverse_x_for_left(game, team, square.x) == 1 def players_in_scoring_distance(game: Game, team: Team, include_own: bool = True, include_opp: bool = True, include_stunned: bool = False) -> List[Player]: players: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp, include_stunned=include_stunned) selected_players: List[Player] = [] for player in players: if distance_to_scoring_endzone(game, team, player.position) <= player.num_moves_left(): selected_players.append(player) return selected_players def distance_to_nearest_player(game: Game, team: Team, square: Square, include_own: bool = True, include_opp: bool = True, only_used: bool = False, include_used: bool = True, include_stunned: bool = True, only_blockable: bool = False) -> int: opps: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp, only_used=only_used, include_used=include_used, include_stunned=include_stunned, only_blockable=only_blockable) cur_max = 100 for opp in opps: dist = opp.position.distance(square) cur_max = min(cur_max, dist) return cur_max def screening_distance(game: Game, from_square: Square, to_square: Square) -> float: # Return the "screening distance" between 3 squares. 
(To complete) # float dist =math.sqrt(math.pow(square.x - cur.position.x, 3) + math.pow(square.y - cur.position.y, 3)) return 0.0 def num_opponents_can_reach(game: Game, team: Team, square: Square) -> int: opps: List[Player] = get_players(game, team, include_own=False, include_opp=True) num_opps_reach: int = 0 for cur in opps: dist = max(square.x - cur.position.x, square.y - cur.position.y) if cur.state.stunned: continue move_allowed = cur.get_ma() + 2 if not cur.state.up: move_allowed -= 3 if dist < move_allowed: num_opps_reach += 1 return num_opps_reach def num_opponents_on_field(game: Game, team: Team) -> int: opps: List[Player] = get_players(game, team, include_own=False, include_opp=True) num_opponents = 0 for cur in opps: if cur.position is not None: num_opponents += 1 return num_opponents def number_opponents_closer_than_to_endzone(game: Game, team: Team, square: Square) -> int: opponents: List[Player] = get_players(game, team, include_own=False, include_opp=True) num_opps = 0 distance_square_endzone = distance_to_defending_endzone(game, team, square) for opponent in opponents: distance_opponent_endzone = distance_to_defending_endzone(game, team, opponent.position) if distance_opponent_endzone < distance_square_endzone: num_opps += 1 return num_opps def in_scoring_range(game: Game, player: Player) -> bool: return player.num_moves_left() >= distance_to_scoring_endzone(game, player.team, player.position) def players_in_scoring_range(game: Game, team: Team, include_own=True, include_opp=True, include_used=True, include_stunned=True) -> List[Player]: players: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp, include_stunned=include_stunned, include_used=include_used) res: List[Player] = [] for player in players: if in_scoring_range(game, player): res.append(player) return res def players_in(game: Game, team: Team, squares: List[Square], include_own=True, include_opp=True, include_used=True, include_stunned=True, 
only_blockable=False) -> List[Player]: allowed_players: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp, include_used=include_used, include_stunned=include_stunned, only_blockable=only_blockable) res: List[Player] = [] for square in squares: player: Optional[Player] = game.get_player_at(square) if player is None: continue if player in allowed_players: res.append(player) return res
""" A number of static methods for interpretting the state of the fantasy football pitch that aren't required directly by the client """ from ffai.core import Game, Action, ActionType from ffai.core.procedure import * from ffai.util.pathfinding import * from typing import Optional, List, Dict class ActionSequence: def __init__(self, action_steps: List[Action], score: float = 0, description: str = ''): """ Creates a new ActionSequence - an ordered list of sequential Actions to attempt to undertake. :param action_steps: Sequence of action steps that form this action. :param score: A score representing the attractiveness of the move (default: 0) :param description: A debug string (default: '') """ # Note the intention of this object is that when the object is acting, as steps are completed, # they are removed from the move_sequence so the next move is always the top of the move_sequence # lis self.action_steps = action_steps self.score = score self.description = description def is_valid(self, game: Game) -> bool: pass def popleft(self): return self.action_steps.pop(0) #val = self.action_steps[0] #del self.action_steps[0] #return val def is_empty(self): return not self.action_steps class FfHeatMap: """ A heat map of a Blood Bowl field. 
A class for analysing zones of control for both teams """ def __init__(self, game: Game, team: Team): self.game=game self.team = team # Note that the edges are not on the field, but represent crowd squares self.units_friendly: List[List[float]] = [[0.0 for y in range(game.state.pitch.height)] for x in range(game.state.pitch.width)] self.units_opponent: List[List[float]] = [[0.0 for y in range(game.state.pitch.height)] for x in range(game.state.pitch.width)] def add_unit_paths(self, player:Player, paths: List[Path]): is_friendly: bool = player.team == self.team for path in paths: if is_friendly: self.units_friendly[path.steps[-1].x][path.steps[-1].y] += (1.0 - path.cost)*(1.0 - path.cost) else: self.units_opponent[path.steps[-1].x][path.steps[-1].y] += (1.0 - path.cost)*(1.0 - path.cost) def add_unit_by_paths(self, game: Game, paths: Dict[Player, List[Path]]): for player in paths.keys(): self.add_unit_paths(player, paths[player]) def add_players_moved(self, game: Game, players: List[Player]): for player in players: adjacents: List[Square] = game.get_adjacent_squares(player.position, occupied=True) self.units_friendly[player.position.x][player.position.y] += 1.0 for adjacent in adjacents: self.units_friendly[player.position.x][player.position.y] += 0.5 def get_ball_move_square_safety_score(self, square: Square) -> float: # Basic idea - identify safe regions to move the ball towards friendly_heat: float = self.units_friendly[square.x][square.y] opponent_heat: float = self.units_opponent[square.x][square.y] score: float = 30.0 * max(0.0, (1.0 - opponent_heat/2)) return score #score: float=0.0 #if opponent_heat < 0.25: score += 15.0 #if opponent_heat < 0.05: score += 15.0 #if opponent_heat < 1.5: score += 5 #if friendly_heat > 3.5: score += 10.0 #score += max(30.0, 5.0*(friendly_heat-opponent_heat)) return score def get_cage_necessity_score(self, square: Square) -> float: opponent_friendly: float = self.units_friendly[square.x][square.y] opponent_heat: float = 
self.units_opponent[square.x][square.y] score: float = 0.0 if opponent_heat < 0.4: score -= 80.0 # if opponent_friendly > opponent_heat: score -= max(30.0, 10.0*(opponent_friendly-opponent_heat)) # if opponent_heat <1.5: score -=5 # if opponent_heat > opponent_friendly: score += 10.0*(opponent_friendly-opponent_heat) return score def blitz_used(game: Game) -> bool: for action in game.state.available_actions: if action.action_type == ActionType.START_BLITZ: return False return True def handoff_used(game: Game) -> bool: for action in game.state.available_actions: if action.action_type == ActionType.START_HANDOFF: return False return True def foul_used(game: Game) -> bool: for action in game.state.available_actions: if action.action_type == ActionType.START_FOUL: return False return True def pass_used(game: Game) -> bool: for action in game.state.available_actions: if action.action_type == ActionType.START_PASS: return False return True def get_players(game: Game, team: Team, include_own: bool = True, include_opp: bool = True, include_stunned: bool = True, include_used: bool = True, include_off_pitch: bool = False, only_blockable: bool = False, only_used: bool = False) -> List[Player]: players: List[Player] = [] selected_players: List[Player] = [] for iteam in game.state.teams: if iteam == team and include_own: players.extend(iteam.players) if iteam != team and include_opp: players.extend(iteam.players) for player in players: if only_blockable and not player.state.up: continue if only_used and not player.state.used: continue if include_stunned or not player.state.stunned: if include_used or not player.state.used: if include_off_pitch or (player.position is not None and not game.is_out_of_bounds(player.position)): selected_players.append(player) return selected_players def caging_squares_north_east(game: Game, protect_square: Square) -> List[Square]: # * At it's simplest, a cage requires 4 platers in the North-East, South-East, South-West and North-West # * positions, 
relative to the ball carrier, such that there is no more than 3 squares between the players in # * each of those adjacent compass directions. # * # * 1 3 # * xx-xx # * xx-xx # * --o-- # * xx-xx # * xx-xx # * 3 4 # * # * pitch is 26 long # * # * # * Basically we need one player in each of the corners: 1-4, but spaced such that there is no gap of 3 squares. # * If the caging player is in 1-4, but next to ball carrier, he ensures this will automatically be me # * # * The only exception to this is when the ball carrier is on, or near, the sideline. Then return the squares # * that can otherwise form the cage. # * caging_squares: List[Square] = [] x = protect_square.x y = protect_square.y if x <= game.state.pitch.width - 3: if y == game.state.pitch.height-2: caging_squares.append(game.get_square(x + 1, y + 1)) caging_squares.append(game.get_square(x + 2, y + 1)) caging_squares.append(game.get_square(x + 1, y)) caging_squares.append(game.get_square(x + 2, y)) elif y == game.state.pitch.height-1: caging_squares.append(game.get_square(x + 1, y)) caging_squares.append(game.get_square(x + 2, y)) else: caging_squares.append(game.get_square(x + 1, y + 1)) caging_squares.append(game.get_square(x + 1, y + 2)) caging_squares.append(game.get_square(x + 2, y + 1)) # caging_squares.append(game.state.pitch.get_square(x + 3, y + 3)) return caging_squares def caging_squares_north_west(game: Game, protect_square: Square) -> List[Square]: caging_squares: List[Square] = [] x = protect_square.x y = protect_square.y if x >= 3: if y == game.state.pitch.height-2: caging_squares.append(game.get_square(x - 1, y + 1)) caging_squares.append(game.get_square(x - 2, y + 1)) caging_squares.append(game.get_square(x - 1, y)) caging_squares.append(game.get_square(x - 2, y)) elif y == game.state.pitch.height-1: caging_squares.append(game.get_square(x - 1, y)) caging_squares.append(game.get_square(x - 2, y)) else: caging_squares.append(game.get_square(x - 1, y + 1)) caging_squares.append(game.get_square(x 
- 1, y + 2)) caging_squares.append(game.get_square(x - 2, y + 1)) # caging_squares.append(game.state.pitch.get_square(x - 3, y + 3)) return caging_squares def caging_squares_south_west(game: Game, protect_square: Square) -> List[Square]: caging_squares: List[Square] = [] x = protect_square.x y = protect_square.y if x >= 3: if y == 2: caging_squares.append(game.get_square(x - 1, y - 1)) caging_squares.append(game.get_square(x - 2, y - 1)) caging_squares.append(game.get_square(x - 1, y)) caging_squares.append(game.get_square(x - 2, y)) elif y == 1: caging_squares.append(game.get_square(x - 1, y)) caging_squares.append(game.get_square(x - 2, y)) else: caging_squares.append(game.get_square(x - 1, y - 1)) caging_squares.append(game.get_square(x - 1, y - 2)) caging_squares.append(game.get_square(x - 2, y - 1)) # caging_squares.append(game.state.pitch.get_square(x - 3, y - 3)) return caging_squares def caging_squares_south_east(game: Game, protect_square: Square) -> List[Square]: caging_squares: List[Square] = [] x = protect_square.x y = protect_square.y if x <= game.state.pitch.width-3: if y == 2: caging_squares.append(game.get_square(x + 1, y - 1)) caging_squares.append(game.get_square(x + 2, y - 1)) caging_squares.append(game.get_square(x + 1, y)) caging_squares.append(game.get_square(x + 2, y)) elif y == 1: caging_squares.append(game.get_square(x + 1, y)) caging_squares.append(game.get_square(x + 2, y)) else: caging_squares.append(game.get_square(x + 1, y - 1)) caging_squares.append(game.get_square(x + 1, y - 2)) caging_squares.append(game.get_square(x + 2, y - 1)) # caging_squares.append(game.get_square(x + 3, y - 3)) return caging_squares def is_caging_position(game: Game, player: Player, protect_player: Player) -> bool: return player.position.distance(protect_player.position) <= 2 and not is_castle_position_of(game, player, protect_player) def has_player_within_n_squares(game: Game, units: List[Player], square: Square, num_squares: int) -> bool: for cur in units: 
if cur.position.distance(square) <= num_squares: return True return False def has_adjacent_player(game: Game, square: Square) -> bool: return not game.get_adjacent_players(square) def is_castle_position_of(game: Game, player1: Player, player2: Player) -> bool: return player1.position.x == player2.position.x or player1.position.y == player2.position.y def is_bishop_position_of(game: Game, player1: Player, player2: Player) -> bool: return abs(player1.position.x - player2.position.x) == abs(player1.position.y - player2.position.y) def attacker_would_surf(game: Game, attacker: Player, defender: Player) -> bool: if (defender.has_skill(Skill.SIDE_STEP) and not attacker.has_skill(Skill.GRAB)) or defender.has_skill(Skill.STAND_FIRM): return False if not attacker.position.is_adjacent(defender.position): return False return direct_surf_squares(game, attacker.position, defender.position) def direct_surf_squares(game: Game, attack_square: Square, defend_square: Square) -> bool: defender_on_sideline: bool = on_sideline(game, defend_square) defender_in_endzone: bool = on_endzone(game, defend_square) if defender_on_sideline and defend_square.x == attack_square.x: return True if defender_in_endzone and defend_square.y == attack_square.y: return True if defender_in_endzone and defender_on_sideline: return True return False def reverse_x_for_right(game: Game, team: Team, x: int) -> int: if not game.is_team_side(Square(13, 3), team): res = game.state.pitch.width - 1 - x else: res = x return res def reverse_x_for_left(game: Game, team: Team, x: int) -> int: if game.is_team_side(Square(13, 3), team): res = game.state.pitch.width - 1 - x else: res = x return res def on_sideline(game: Game, square: Square) -> bool: return square.y == 1 or square.y == game.state.pitch.height - 1 def on_endzone(game: Game, square: Square) -> bool: return square.x == 1 or square.x == game.state.pitch.width - 1 def on_los(game: Game, team: Team, square: Square) -> bool: return (reverse_x_for_right(game, 
team, square.x) == 13) and 4 < square.y < 21 def los_squares(game: Game, team: Team) -> List[Square]: squares: List[Square] = [ game.get_square(reverse_x_for_right(game, team, 13), 5), game.get_square(reverse_x_for_right(game, team, 13), 6), game.get_square(reverse_x_for_right(game, team, 13), 7), game.get_square(reverse_x_for_right(game, team, 13), 8), game.get_square(reverse_x_for_right(game, team, 13), 9), game.get_square(reverse_x_for_right(game, team, 13), 10), game.get_square(reverse_x_for_right(game, team, 13), 11) ] return squares def distance_to_sideline(game: Game, square: Square) -> int: return min(square.y - 1, game.state.pitch.height - square.y - 2) def is_endzone(game, square: Square) -> bool: return square.x == 1 or square.x == game.state.pitch.width - 1 def last_block_proc(game) -> Optional[Block]: for i in range(len(game.state.stack.items) - 1, -1, -1): if isinstance(game.state.stack.items[i], Block): block_proc = game.state.stack.items[i] return block_proc return None def is_adjacent_ball(game: Game, square: Square) -> bool: ball_square = game.get_ball_position() return ball_square is not None and ball_square.is_adjacent(square) def squares_within(game: Game, square: Square, distance: int) -> List[Square]: squares: List[Square] = [] for i in range(-distance, distance+1): for j in range(-distance, distance+1): cur_square = game.get_square(square.x+i, square.y+j) if cur_square != square and not game.is_out_of_bounds(cur_square): squares.append(cur_square) return squares def distance_to_defending_endzone(game: Game, team: Team, position: Square) -> int: res = reverse_x_for_right(game, team, position.x) - 1 return res def distance_to_scoring_endzone(game: Game, team: Team, position: Square) -> int: res = reverse_x_for_left(game, team, position.x) - 1 return res #return game.state.pitch.width - 1 - reverse_x_for_right(game, team, position.x) def players_in_scoring_endzone(game: Game, team: Team, include_own: bool = True, include_opp: bool = False) -> 
List[Player]: players: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp) selected_players: List[Player] = [] for player in players: if in_scoring_endzone(game, team, player.position): selected_players.append(player) return selected_players def in_scoring_endzone(game: Game, team: Team, square: Square) -> bool: return reverse_x_for_left(game, team, square.x) == 1 def players_in_scoring_distance(game: Game, team: Team, include_own: bool = True, include_opp: bool = True, include_stunned: bool = False) -> List[Player]: players: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp, include_stunned=include_stunned) selected_players: List[Player] = [] for player in players: if distance_to_scoring_endzone(game, team, player.position) <= player.num_moves_left(): selected_players.append(player) return selected_players def distance_to_nearest_player(game: Game, team: Team, square: Square, include_own: bool = True, include_opp: bool = True, only_used: bool = False, include_used: bool = True, include_stunned: bool = True, only_blockable: bool = False) -> int: opps: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp, only_used=only_used, include_used=include_used, include_stunned=include_stunned, only_blockable=only_blockable) cur_max = 100 for opp in opps: dist = opp.position.distance(square) cur_max = min(cur_max, dist) return cur_max def screening_distance(game: Game, from_square: Square, to_square: Square) -> float: # Return the "screening distance" between 3 squares. 
(To complete) # float dist =math.sqrt(math.pow(square.x - cur.position.x, 3) + math.pow(square.y - cur.position.y, 3)) return 0.0 def num_opponents_can_reach(game: Game, team: Team, square: Square) -> int: opps: List[Player] = get_players(game, team, include_own=False, include_opp=True) num_opps_reach: int = 0 for cur in opps: dist = max(square.x - cur.position.x, square.y - cur.position.y) if cur.state.stunned: continue move_allowed = cur.get_ma() + 2 if not cur.state.up: move_allowed -= 3 if dist < move_allowed: num_opps_reach += 1 return num_opps_reach def num_opponents_on_field(game: Game, team: Team) -> int: opps: List[Player] = get_players(game, team, include_own=False, include_opp=True) num_opponents = 0 for cur in opps: if cur.position is not None: num_opponents += 1 return num_opponents def number_opponents_closer_than_to_endzone(game: Game, team: Team, square: Square) -> int: opponents: List[Player] = get_players(game, team, include_own=False, include_opp=True) num_opps = 0 distance_square_endzone = distance_to_defending_endzone(game, team, square) for opponent in opponents: distance_opponent_endzone = distance_to_defending_endzone(game, team, opponent.position) if distance_opponent_endzone < distance_square_endzone: num_opps += 1 return num_opps def in_scoring_range(game: Game, player: Player) -> bool: return player.num_moves_left() >= distance_to_scoring_endzone(game, player.team, player.position) def players_in_scoring_range(game: Game, team: Team, include_own=True, include_opp=True, include_used=True, include_stunned=True) -> List[Player]: players: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp, include_stunned=include_stunned, include_used=include_used) res: List[Player] = [] for player in players: if in_scoring_range(game, player): res.append(player) return res def players_in(game: Game, team: Team, squares: List[Square], include_own=True, include_opp=True, include_used=True, include_stunned=True, 
only_blockable=False) -> List[Player]: allowed_players: List[Player] = get_players(game, team, include_own=include_own, include_opp=include_opp, include_used=include_used, include_stunned=include_stunned, only_blockable=only_blockable) res: List[Player] = [] for square in squares: player: Optional[Player] = game.get_player_at(square) if player is None: continue if player in allowed_players: res.append(player) return res
en
0.805699
A number of static methods for interpretting the state of the fantasy football pitch that aren't required directly by the client Creates a new ActionSequence - an ordered list of sequential Actions to attempt to undertake. :param action_steps: Sequence of action steps that form this action. :param score: A score representing the attractiveness of the move (default: 0) :param description: A debug string (default: '') # Note the intention of this object is that when the object is acting, as steps are completed, # they are removed from the move_sequence so the next move is always the top of the move_sequence # lis #val = self.action_steps[0] #del self.action_steps[0] #return val A heat map of a Blood Bowl field. A class for analysing zones of control for both teams # Note that the edges are not on the field, but represent crowd squares # Basic idea - identify safe regions to move the ball towards #score: float=0.0 #if opponent_heat < 0.25: score += 15.0 #if opponent_heat < 0.05: score += 15.0 #if opponent_heat < 1.5: score += 5 #if friendly_heat > 3.5: score += 10.0 #score += max(30.0, 5.0*(friendly_heat-opponent_heat)) # if opponent_friendly > opponent_heat: score -= max(30.0, 10.0*(opponent_friendly-opponent_heat)) # if opponent_heat <1.5: score -=5 # if opponent_heat > opponent_friendly: score += 10.0*(opponent_friendly-opponent_heat) # * At it's simplest, a cage requires 4 platers in the North-East, South-East, South-West and North-West # * positions, relative to the ball carrier, such that there is no more than 3 squares between the players in # * each of those adjacent compass directions. # * # * 1 3 # * xx-xx # * xx-xx # * --o-- # * xx-xx # * xx-xx # * 3 4 # * # * pitch is 26 long # * # * # * Basically we need one player in each of the corners: 1-4, but spaced such that there is no gap of 3 squares. 
# * If the caging player is in 1-4, but next to ball carrier, he ensures this will automatically be me # * # * The only exception to this is when the ball carrier is on, or near, the sideline. Then return the squares # * that can otherwise form the cage. # * # caging_squares.append(game.state.pitch.get_square(x + 3, y + 3)) # caging_squares.append(game.state.pitch.get_square(x - 3, y + 3)) # caging_squares.append(game.state.pitch.get_square(x - 3, y - 3)) # caging_squares.append(game.get_square(x + 3, y - 3)) #return game.state.pitch.width - 1 - reverse_x_for_right(game, team, position.x) # Return the "screening distance" between 3 squares. (To complete) # float dist =math.sqrt(math.pow(square.x - cur.position.x, 3) + math.pow(square.y - cur.position.y, 3))
3.358604
3
sb_backend/cli/cli.py
DmitriyGrigoriev/sb-fastapi
0
9875
# -*- coding: utf-8 -*- """sb-fastapi CLI root.""" import logging import click from sb_backend.cli.commands.serve import serve @click.group() @click.option( "-v", "--verbose", help="Enable verbose logging.", is_flag=True, default=False, ) def cli(**options): """sb-fastapi CLI root.""" if options["verbose"]: level = logging.DEBUG else: level = logging.INFO logging.basicConfig( level=level, format="[%(asctime)s] [%(process)s] [%(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S %z", ) cli.add_command(serve)
# -*- coding: utf-8 -*- """sb-fastapi CLI root.""" import logging import click from sb_backend.cli.commands.serve import serve @click.group() @click.option( "-v", "--verbose", help="Enable verbose logging.", is_flag=True, default=False, ) def cli(**options): """sb-fastapi CLI root.""" if options["verbose"]: level = logging.DEBUG else: level = logging.INFO logging.basicConfig( level=level, format="[%(asctime)s] [%(process)s] [%(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S %z", ) cli.add_command(serve)
en
0.396647
# -*- coding: utf-8 -*- sb-fastapi CLI root. sb-fastapi CLI root.
2.063007
2
1-Chapter/htmlcomponents.py
DSandovalFlavio/Dashboards-Plotly-Dash
0
9876
import dash from dash import html app = dash.Dash(__name__) app.layout = html.Div(children=[html.H1('Data Science', style = {'textAlign': 'center', 'color': '#0FD08D', 'font-size': '50px'}), html.H2('La carrera mas sexy del siglo XXI', style = {'textAlign': 'center', 'color' : '#009A64'}), html.P('Factores clave:'), html.Ul(children = [html.Li('Factor 1'), html.Li('Factor 2'), html.Li('Factor 3'), html.Li(['Source: ', html.A('https://www.excelsior.com.mx/nacional/ciencia-de-datos-la-carrera-mas-sexy-del-xxi-en-la-unam/1323946', href = 'https://www.excelsior.com.mx/nacional/ciencia-de-datos-la-carrera-mas-sexy-del-xxi-en-la-unam/1323946') ]) ]) ]) if __name__ == '__main__': app.run_server(debug=True)
import dash from dash import html app = dash.Dash(__name__) app.layout = html.Div(children=[html.H1('Data Science', style = {'textAlign': 'center', 'color': '#0FD08D', 'font-size': '50px'}), html.H2('La carrera mas sexy del siglo XXI', style = {'textAlign': 'center', 'color' : '#009A64'}), html.P('Factores clave:'), html.Ul(children = [html.Li('Factor 1'), html.Li('Factor 2'), html.Li('Factor 3'), html.Li(['Source: ', html.A('https://www.excelsior.com.mx/nacional/ciencia-de-datos-la-carrera-mas-sexy-del-xxi-en-la-unam/1323946', href = 'https://www.excelsior.com.mx/nacional/ciencia-de-datos-la-carrera-mas-sexy-del-xxi-en-la-unam/1323946') ]) ]) ]) if __name__ == '__main__': app.run_server(debug=True)
none
1
2.746451
3
baadalinstallation/baadal/modules/vm_helper.py
iitd-plos/baadal2.0
8
9877
# -*- coding: utf-8 -*- ################################################################################### from gluon import current from helper import get_constant, execute_remote_cmd, config, get_datetime, \ log_exception, is_pingable, get_context_path from libvirt import * # @UnusedWildImport from log_handler import logger from nat_mapper import create_mapping, remove_mapping import math, shutil, libvirt, os, time, random import xml.etree.ElementTree as etree def _choose_datastore(): """ Chooses datastore from a list of available datastores """ # datastore_capacity = current.db(current.db.datastore.id >= 0).select(orderby = current.db.datastore.used datastores = current.db(current.db.datastore.id >= 0).select() datastore_length = len(datastores) logger.debug("datastore_lengtn" + str(datastore_length)) if(datastore_length == 0): raise Exception("No datastore found.") else: count = datastore_length available_datastores = {} while count != 0: available = datastores[datastore_length-count].capacity - datastores[datastore_length-count].used available_datastores[datastores[datastore_length-count]] = available count = count-1 z = [(i,available_datastores[i]) for i in available_datastores] z.sort(key=lambda x: x[1]) available_datastores = z logger.debug("available d" + str(available_datastores[-1])) first_elts = available_datastores[-1] first_elts = first_elts[0] logger.debug("selected database" + str(first_elts)) return first_elts def host_resources_used(host_id): """ Returns resources utilization of a host in MB, Count """ RAM = 0.0 CPU = 0.0 vms = current.db((current.db.vm_data.host_id == host_id) & (current.db.vm_data.status != current.VM_STATUS_UNKNOWN) & (current.db.vm_data.status != current.VM_STATUS_IN_QUEUE)).select() logger.debug("vms selected are: " + str(vms)) for vm_data in vms: RAM += vm_data.RAM CPU += vm_data.vCPU return (math.ceil(RAM),math.ceil(CPU)) def getVirshDomainConn(vm_details, host_ip=None, domain_name=None): """ Generic method to establish 
libvirt connection """ if vm_details != None: host_ip = vm_details.host_id.host_ip.private_ip domain_name = vm_details.vm_identity connection_object = libvirt.open("qemu+ssh://root@" + host_ip + "/system") domain = connection_object.lookupByName(domain_name) return (connection_object, domain) def getVirshDomain(vm_details): """ Generic method to establish libvirt connection """ (connection_object, domain) = getVirshDomainConn(vm_details) connection_object.close() return domain def _set_portgroup_in_vm(domain_name, portgroup, host_ip, vlan_tag): """ Set the vlan tag in network configuration of VM This is required to ensure that VM fetches IP of its vlan from DHCP """ (connection_object, domain) = getVirshDomainConn(None, host_ip, domain_name) xml = etree.fromstring(domain.XMLDesc(0)) source_network_element = xml.find('.//interface/source') source_network_string=etree.tostring(source_network_element) logger.debug("Source network is " + source_network_string) if source_network_string.find(" bridge=") != -1: logger.debug("Source is set to bridge adding <vlan><tag_id> to the interface tag ") root_new = xml.find('.//interface') root_new_vlan= etree.SubElement(root_new, 'vlan') root_new_tag= etree.SubElement(root_new_vlan, 'tag') root_new_tag.set('id',vlan_tag) logger.debug("After append root_new_vlan is " + etree.tostring(root_new_vlan)) elif source_network_string.find(" network=") != -1: logger.debug("Source is set to network adding portgroup to the source tag ") source_network_element.set('portgroup', portgroup) logger.debug("Changed source network is " + etree.tostring(source_network_element)) else: logger.debug("Neither VM nor vlan tagId is added in the xml" ) domain = connection_object.defineXML(etree.tostring(xml)) domain.destroy() domain.create() domain.isActive() connection_object.close() def _get_private_ip_mac(security_domain_id): """ Chooses a random Private IP from the pool, such that: - It is not assigned to any VM or host - It belongs to VLAN of given 
security domain """ vlans = current.db(current.db.security_domain.id == security_domain_id)._select(current.db.security_domain.vlan) private_ip_pool = current.db((~current.db.private_ip_pool.id.belongs(current.db(current.db.vm_data.private_ip != None)._select(current.db.vm_data.private_ip))) & (~current.db.private_ip_pool.id.belongs(current.db(current.db.host.host_ip != None)._select(current.db.host.host_ip))) & (current.db.private_ip_pool.vlan.belongs(vlans))).select(current.db.private_ip_pool.ALL, orderby='<random>').first() if private_ip_pool: return private_ip_pool else: sd = current.db.security_domain[security_domain_id] raise Exception(("Available MACs are exhausted for security domain '%s'." % sd.name)) def _choose_random_public_ip(): """ Chooses a random Public IP from the pool, such that: - It is not assigned to any VM - It is not assigned to any host - IP is marked active. """ public_ip_pool = current.db((~current.db.public_ip_pool.id.belongs(current.db(current.db.vm_data.public_ip != None)._select(current.db.vm_data.public_ip))) & (~current.db.public_ip_pool.id.belongs(current.db(current.db.host.public_ip != None)._select(current.db.host.public_ip))) & (current.db.public_ip_pool.is_active == True)) \ .select(current.db.public_ip_pool.ALL, orderby='<random>').first() return public_ip_pool def _choose_mac_ip(vm_properties): """ Chooses mac address and ip address for a vm to be installed. 
It also chooses a random public IP if requested """ if not 'private_ip' in vm_properties: private_ip_info = _get_private_ip_mac(vm_properties['security_domain']) vm_properties['private_ip'] = private_ip_info.private_ip vm_properties['mac_addr'] = private_ip_info.mac_addr vm_properties['vlan_name'] = private_ip_info.vlan.name vm_properties['vlan_tag'] = private_ip_info.vlan.vlan_tag if vm_properties['public_ip_req']: if 'public_ip' not in vm_properties: public_ip_pool = _choose_random_public_ip() if public_ip_pool: vm_properties['public_ip'] = public_ip_pool.public_ip else: raise Exception("Available Public IPs are exhausted.") else: vm_properties['public_ip'] = None def _choose_mac_ip_vncport(vm_properties): """ Chooses mac address, ip address and vncport for a vm to be installed """ _choose_mac_ip(vm_properties) start_range = int(get_constant('vncport_start_range')) end_range = int(get_constant('vncport_end_range')) vnc_ports_taken = current.db().select(current.db.vm_data.vnc_port) while True: random_vnc_port = random.randrange(start_range, end_range, 1) if not random_vnc_port in vnc_ports_taken: break; vm_properties['vnc_port'] = str(random_vnc_port) def find_new_host(RAM, vCPU): """ Select a random host from list of 3 hosts with available RAM and CPU Availability is checked with 200 percent over-commitment. 
""" hosts = current.db(current.db.host.status == 1).select() hosts = hosts.as_list(True,False) count = 3 selected_hosts = [] while count != 0 and hosts: host = random.choice(hosts) logger.debug("Checking host =" + host['host_name']) (used_ram, used_cpu) = host_resources_used(host['id']) logger.debug("used ram: " + str(used_ram) + " used cpu: " + str(used_cpu) + " host ram: " + str(host['RAM']) + " host cpu "+ str(host['CPUs'])) host_ram_after_200_percent_overcommitment = math.floor((host['RAM'] * 1024) * 2) host_cpu_after_200_percent_overcommitment = math.floor(host['CPUs'] * 2) logger.debug("ram available: %s cpu available: %s cpu < max cpu: %s" % ((( host_ram_after_200_percent_overcommitment - used_ram) >= RAM), ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vCPU), (vCPU <= host['CPUs']) )) if((( host_ram_after_200_percent_overcommitment - used_ram) >= RAM) and ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vCPU) and (vCPU <= host['CPUs'])): selected_hosts.append(host) count = count -1 hosts.remove(host) if selected_hosts: #Sort selected host list by Ram first then Cpu selected_host = sorted(selected_hosts,key=lambda k: k['RAM'])[0] return selected_host['id'] #If no suitable host found raise Exception("No active host is available for a new vm.") def allocate_vm_properties(vm_details): """ Allocates vm properties ( datastore, host, ip address, mac address, vnc port, ram, vcpus) """ logger.debug("Inside allocate_vm_properties()...") vm_properties = {} vm_properties['datastore'] = _choose_datastore() logger.debug("Datastore selected is: " + str(vm_properties['datastore'])) vm_properties['host'] = find_new_host(vm_details.RAM, vm_details.vCPU) logger.debug("Host selected is: " + str(vm_properties['host'])) vm_properties['public_ip_req'] = False if (vm_details.public_ip == None) else True vm_properties['security_domain'] = vm_details.security_domain _choose_mac_ip_vncport(vm_properties) logger.debug("MAC is : " + str(vm_properties['mac_addr']) 
+ " IP is : " + str(vm_properties['private_ip']) + " VNCPORT is : " \ + str(vm_properties['vnc_port']) + " Vlan tag is " + str(vm_properties['vlan_tag']) ) vm_properties['ram'] = vm_details.RAM vm_properties['vcpus'] = vm_details.vCPU return vm_properties def create_vm_image(vm_details, datastore): """ Create a VM image - Creates a directory for the new VM using vm_identity - Find the location of template image requested for - Copy the template image from its location to new vm directory """ # Creates a directory for the new vm vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity logger.debug("Creating vm directory...") if not os.path.exists (vm_directory_path): os.makedirs(vm_directory_path) else: raise Exception("Directory with same name as vmname already exists.") # Finds the location of template image that the user has requested for its vm. template = current.db.template[vm_details.template_id] vm_image_name = vm_directory_path + '/' + vm_details.vm_identity + '.qcow2' # Copies the template image from its location to new vm directory storage_type = config.get("GENERAL_CONF","storage_type") copy_command = 'ndmpcopy ' if storage_type == current.STORAGE_NETAPP_NFS else 'cp ' #template_dir = get_constant('vm_templates_datastore') if copy_command == 'cp ': template_location = datastore.system_mount_point + '/' + get_constant('templates_dir') + '/' + template.hdfile logger.debug("cp %s %s" % (template_location, vm_image_name)) rc = os.system("cp %s %s" % (template_location, vm_image_name)) if rc != 0: logger.error("Copy not successful") raise Exception("Copy not successful") else: logger.debug("Copied successfully") elif copy_command == 'ndmpcopy ': template_dir = template.datastore_id.path logger.debug(template_dir) logger.debug("Copy in progress when storage type is " + str(storage_type)) command_to_execute = copy_command + template_dir + '/' + get_constant("templates_dir") + '/' + \ template.hdfile + ' ' + 
datastore.path + '/' + get_constant('vms') + '/' + \ vm_details.vm_identity logger.debug("ndmpcopy command: " + str(command_to_execute)) command_output = execute_remote_cmd(datastore.ds_ip, datastore.username, command_to_execute, datastore.password) logger.debug(command_output) logger.debug("Copied successfully.") try: vm_template_name = datastore.system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity + '/' + template.hdfile os.rename(vm_template_name, vm_image_name) logger.debug("Template renamed successfully") except: logger.debug("Template rename not successful") raise Exception("Template rename not successful") return (template, vm_image_name) def _get_install_command(vm_details, vm_image_location, vm_properties): """ Generates install command for vm """ template = vm_properties['template'] bus = ',bus=virtio' optional = ' --import --os-type=' + template.os model = ',model=virtio' if (template.arch != 'amd64' and template.os == 'Linux'): optional = optional + ' --arch=' + template.arch + ' ' format_command = '' if (template.type == 'QCOW2'): format_command = ',format=qcow2' if (template.os == 'Windows'): bus = '' model = '' install_command = 'virt-install \ --name=' + vm_details.vm_identity + ' \ --ram=' + str(vm_properties['ram']) + ' \ --vcpus=' + str(vm_properties['vcpus']) + optional + ' \ --disk path=' + vm_image_location + format_command + bus + ',cache=none' + ' \ --network network='+current.LIBVIRT_NETWORK + model + ',mac=' + vm_properties['mac_addr'] + ' \ --graphics vnc,port=' + vm_properties['vnc_port'] + ',listen=0.0.0.0,password=<PASSWORD> \ --noautoconsole \ --autostart \ --force' return install_command def _generate_disk_xml(diskpath,target_disk): """ Generates xml for defining new disk """ root_element = etree.Element('disk',attrib = {'type':'block','device':'disk'}) etree.SubElement(root_element, 'driver',attrib = {'name':'qemu','cache':'none', 'type':'qcow2'}) etree.SubElement(root_element, 'source', attrib = 
{'dev':diskpath}) etree.SubElement(root_element, 'target', attrib = {'dev': target_disk}) return (etree.tostring(root_element)) def create_extra_disk_image(vm_details, disk_name, size, datastore): """ Create extra disk image """ vm_extra_disks_directory_path = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \ datastore.ds_name + '/' + vm_details.vm_identity if not os.path.exists (vm_extra_disks_directory_path): logger.debug("Making Directory") os.makedirs(vm_extra_disks_directory_path) diskpath = vm_extra_disks_directory_path + '/' + disk_name command= "qemu-img create -f qcow2 "+ diskpath + " " + str(size) + "G" output = os.system(command) return False if output != 0 else True def attach_disk(vm_details, disk_name, hostip, already_attached_disks, new_vm): """ Attach given disk to the VM """ try: (connection_object, domain) = getVirshDomainConn(None, hostip, vm_details.vm_identity) #already_attached_disks = len(current.db(current.db.attached_disks.vm_id == vm.id).select()) logger.debug("Value of alreadyattached is : " + str(already_attached_disks)) (diskpath, device_present, disk_size) = get_extra_disk_location(vm_details.datastore_id, vm_details.vm_identity, disk_name, True) if not device_present: raise Exception("Device to be attached %s missing" %(diskpath)) # Attaching disk to vm using libvirt API target_disk = "vd" + chr(97 + already_attached_disks + 1) logger.debug(target_disk) logger.debug("...................") xmlDescription = _generate_disk_xml(diskpath, target_disk) logger.debug(xmlDescription) logger.debug("new vm is %s " % new_vm) if new_vm: logger.debug("Starting to attach disk on new vm request.") domain.destroy() logger.debug("VM destroyed") domain.attachDeviceFlags(xmlDescription, VIR_DOMAIN_AFFECT_CONFIG) logger.debug("Disk attached") logger.debug("Turn on vm") domain.create() logger.debug("VM started") domain.isActive() elif vm_details.status == current.VM_STATUS_SHUTDOWN: logger.debug("Starting to attach disk while 
vm is shutdown.") domain.attachDeviceFlags(xmlDescription, VIR_DOMAIN_AFFECT_CONFIG) logger.debug("Disk attached") else: raise Exception("VM is not in shutdown state. Check its status on host") xmlfile = domain.XMLDesc(0) domain = connection_object.defineXML(xmlfile) logger.debug("VM XML redefined") connection_object.close() return disk_size except: logger.exception('Exception: ') return 0 def serve_extra_disk_request(vm_details, disk_size, host_ip, new_vm = False): """ Serves extra disk request and updates db """ logger.debug("Starting to serve extra disk request...") logger.debug("new vm is %s " % new_vm) datastore = _choose_datastore() already_attached_disks = len(current.db(current.db.attached_disks.vm_id == vm_details.id).select()) disk_name = vm_details.vm_identity + "_disk" + str(already_attached_disks + 1) + ".qcow2" disk_created = create_extra_disk_image(vm_details, disk_name, disk_size, datastore) vm_details.datastore_id = datastore.id if disk_created: if (attach_disk(vm_details, disk_name, host_ip, already_attached_disks, new_vm)): current.db.attached_disks.insert(vm_id = vm_details.id, datastore_id = datastore.id , attached_disk_name = disk_name, capacity = disk_size) current.db(current.db.datastore.id == datastore.id).update(used = int(datastore.used) + int(disk_size)) return True return False def launch_vm_on_host(vm_details, vm_image_location, vm_properties): """ Launches a vm image on host """ attach_disk_status_message = '' install_command = _get_install_command(vm_details, vm_image_location, vm_properties) # Starts installing a vm host_ip = current.db.host[vm_properties['host']].host_ip.private_ip logger.debug("Installation started...") logger.debug("Host is "+ host_ip) logger.debug("Installation command : " + install_command) command_output = execute_remote_cmd(host_ip, 'root', install_command) logger.debug(command_output) logger.debug("Starting to set portgroup in vm...") _set_portgroup_in_vm(vm_details['vm_identity'], 
vm_properties['vlan_name'], host_ip, vm_properties['vlan_tag']) logger.debug("Portgroup set in vm") # Serving HDD request if (int(vm_details.extra_HDD) != 0): if (serve_extra_disk_request(vm_details, vm_details.extra_HDD, host_ip, new_vm = True)): message = "Attached extra disk successfully." attach_disk_status_message += message logger.debug(message) else: attach_disk_status_message += "Attached extra disk failed." return attach_disk_status_message def check_if_vm_defined(hostip, vmname): """ Checks if a newly created vm is successfully defined """ vm_defined = False try: connection_object = libvirt.openReadOnly('qemu+ssh://root@'+ hostip +'/system') domain = connection_object.lookupByName(vmname) if domain.ID() in connection_object.listDomainsID(): vm_defined = True connection_object.close() return vm_defined except: return False def _free_vm_properties(vm_details, vm_properties): """ Frees vm properties in-case installation has failed mid-way """ logger.debug("VM installation fails..Starting to free vm properties") if vm_properties: host_ip_of_vm = current.db.host[vm_properties['host']].host_ip.private_ip logger.debug("Host IP of vm is " + str(host_ip_of_vm)) if check_if_vm_defined(host_ip_of_vm, vm_details.vm_identity): connection_object = libvirt.open('qemu+ssh://root@'+ host_ip_of_vm +'/system') domain = connection_object.lookupByName(vm_details.vm_identity) logger.debug("Starting to delete vm from host..") domain.destroy() domain.undefine() connection_object.close() logger.debug("VM deleted.") current.db(current.db.attached_disks.vm_id == vm_details.id).delete() if 'datastore' in vm_properties: vm_directory_path = vm_properties['datastore'].system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity vm_extra_disk_dir_path = vm_properties['datastore'].system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + vm_properties['datastore'].ds_name + '/' + vm_details.vm_identity if os.path.exists (vm_directory_path): 
logger.debug("Starting to delete vm directory.") shutil.rmtree(vm_directory_path) if os.path.exists (vm_extra_disk_dir_path): logger.debug("Starting to delete vm extra disk directory.") shutil.rmtree(vm_extra_disk_dir_path) return def update_db_after_vm_installation(vm_details, vm_properties, parent_id = None): """ Updates db after a vm is installed successfully """ logger.debug("Starting to update db after vm installation..") hostid = vm_properties['host'] datastore = vm_properties['datastore'] template_hdd = vm_properties['template'].hdd logger.debug("Inside update db after installation") logger.debug(vm_properties) # Updating the used entry of datastore current.db(current.db.datastore.id == datastore.id).update(used = int(datastore.used) + int(vm_details.extra_HDD) + int(template_hdd)) private_ip_id = current.db.private_ip_pool(private_ip=vm_properties['private_ip']).id public_ip_id = None if vm_properties['public_ip'] != None: public_ip_id = current.db.public_ip_pool(public_ip=vm_properties['public_ip']).id if parent_id: vm_status = current.VM_STATUS_SHUTDOWN else: vm_status = current.VM_STATUS_RUNNING # Update vm_data table current.db(current.db.vm_data.id == vm_details.id).update( host_id = hostid, extra_HDD = vm_details.extra_HDD, datastore_id = datastore.id, vnc_port = vm_properties['vnc_port'], private_ip = private_ip_id, public_ip = public_ip_id, start_time = get_datetime(), parent_id = parent_id, status = vm_status) logger.debug("Updated db") return def create_object_store(parameters,object_data): try: logger.debug("In create_object_store() function...") object_name=object_data['object_store_name'] size_limit=object_data['object_store_size'] sh_path = os.path.join(get_context_path(), 'private/object_storage.sh') command = 'sh %s %s %s' %(sh_path, object_name, str(size_limit)) logger.debug("command :%s" %command) file_name= object_data['object_store_name'] + "_key.txt" file_path = os.path.join(get_context_path(), 'private/Object_keys/' + file_name) cp = 
os.open(file_path,os.O_RDWR|os.O_CREAT) co = os.fdopen(cp,"rw+") fd = os.open('/home/key.txt',os.O_RDWR|os.O_CREAT) fo = os.fdopen(fd,"r+") key_s3_secret= fo.readline(); co.write(key_s3_secret); key_s3_access= fo.readline(); co.write(key_s3_access); key_swift_secret= fo.readline(); co.write(key_swift_secret); swift_user= 'Swift_user: ' + object_name + ':swift' co.write(swift_user) co.close() a,b,key_swift_secret= key_swift_secret.partition(' ') # @UnusedVariable a,b,key_s3_secret= key_s3_secret.partition(' ') # @UnusedVariable a,b,key_s3_access= key_s3_access.partition(' ') # @UnusedVariable #print key_s3_secret, key_s3_access , key_swift_secret object_data.update_record(swift_access_key= key_swift_secret.strip() , s3_secret_key= key_s3_secret.strip(), s3_access_key= key_s3_access.strip(), status=3) fo.close() message = "Object Store is created successfully." return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) # Installs a vm def install(parameters): """ Installs a vm """ vmid = parameters['vm_id'] logger.debug("In install() function...") vm_details = current.db.vm_data[vmid] vm_properties = None try: # Fetches vm details from vm_data table logger.debug("VM details are: " + str(vm_details)) # Calling allocate_vm_properties function vm_properties = allocate_vm_properties(vm_details) # Calling create_vm_image function (vm_properties['template'], vm_image_location) = create_vm_image(vm_details, vm_properties['datastore']) # Calling launch_vm_on_host attach_disk_status_message = launch_vm_on_host(vm_details, vm_image_location, vm_properties) # Checking if vm has been installed successfully assert(check_if_vm_defined(current.db.host[vm_properties['host']].host_ip.private_ip, vm_details.vm_identity)), "VM is not installed. Check logs." 
if vm_properties['public_ip_req']: create_mapping(vm_properties['public_ip'], vm_properties['private_ip']) # Update database after vm installation update_db_after_vm_installation(vm_details, vm_properties) message = "VM is installed successfully." + attach_disk_status_message logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: if vm_properties != None: _free_vm_properties(vm_details, vm_properties) logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def start(parameters): """ Starts a vm """ logger.debug("Inside start() function") vm_id = parameters['vm_id'] vm_details = current.db.vm_data[vm_id] try: domain = getVirshDomain(vm_details) if domain.info()[0] == VIR_DOMAIN_RUNNING: raise Exception("VM is already running. Check vm status on host.") domain.create() current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_RUNNING) message = vm_details.vm_identity + " is started successfully." logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def suspend(parameters): """ Suspends a vm """ logger.debug("Inside suspend() function") vm_id = parameters['vm_id'] vm_details = current.db.vm_data[vm_id] try: domain = getVirshDomain(vm_details) if domain.info()[0] == VIR_DOMAIN_PAUSED: raise Exception("VM is already paused. Check vm status on host.") domain.suspend() current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_SUSPENDED) message = vm_details.vm_identity + " is suspended successfully." 
logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def resume(parameters): """ Resumes a vm """ logger.debug("Inside resume() function") vm_id = parameters['vm_id'] vm_details = current.db.vm_data[vm_id] try: domain = getVirshDomain(vm_details) if domain.info()[0] == VIR_DOMAIN_RUNNING: raise Exception("VM is already running. Check vm status on host.") domain.resume() current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_RUNNING) message = vm_details.vm_identity + " is resumed successfully." logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def destroy(parameters): """ Destroys a vm forcefully """ logger.debug("Inside destroy() function") vm_id = parameters['vm_id'] vm_details = current.db.vm_data[vm_id] logger.debug(str(vm_details)) try: domain = getVirshDomain(vm_details) if domain.info()[0] == VIR_DOMAIN_SHUTOFF: raise Exception("VM is already shutoff. Check vm status on host.") domain.destroy() current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_SHUTDOWN) message = vm_details.vm_identity + " is destroyed successfully." 
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def shutdown(parameters):
    """
    Shut down a VM by saving its state with libvirt managedSave and marking it
    SHUTDOWN in the database.

    parameters: dict with key 'vm_id' (primary key into vm_data).
    Returns a (task_status, message) tuple; FAILED status carries the
    formatted exception text from log_exception().
    """
    logger.debug("Inside shutdown() function")
    vm_id = parameters['vm_id']
    vm_details = current.db.vm_data[vm_id]
    logger.debug(str(vm_details))
    try:
        domain = getVirshDomain(vm_details)
        # Guard: a domain already in SHUTOFF cannot be managed-saved again.
        if domain.info()[0] == VIR_DOMAIN_SHUTOFF:
            raise Exception("VM is already shutoff. Check vm status on host.")
        # managedSave stops the domain but preserves its memory state on disk,
        # so the next start resumes from this point.
        domain.managedSave()
        current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_SHUTDOWN)
        message = vm_details.vm_identity + " is shutdown successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def _clean_up_database_after_vm_deletion(vm_details):
    """
    Clean up disk artefacts and database rows after a VM is deleted:
    archive the VM image directory, remove extra-disk images, release the
    datastore space accounting, and detach task-queue/disk references.
    """
    logger.debug("Inside clean up database after vm deletion () function...")
    # moving vm image folder to archives folder (kept, not deleted, so the
    # image can be recovered later)
    archive_directory_path = vm_details.datastore_id.system_mount_point + '/' + get_constant('archives_dir')
    if not os.path.exists(archive_directory_path):
        os.makedirs(archive_directory_path)
    source_file = vm_details.datastore_id.system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity
    # Timestamp suffix keeps archives of repeatedly-recreated VMs distinct.
    archive_filename = vm_details.vm_identity + str(get_datetime())
    logger.debug(archive_filename)
    destination_file = archive_directory_path + '/' + archive_filename
    shutil.move(source_file, destination_file)
    # removing hdd (extra attached disk images are destroyed, not archived)
    vm_extra_disks_directory_path = vm_details.datastore_id.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                    vm_details.datastore_id.ds_name + "/" + vm_details.vm_identity
    if os.path.exists(vm_extra_disks_directory_path):
        shutil.rmtree(vm_extra_disks_directory_path)
    # updating the used entry of database: give back the template HDD plus any
    # extra HDD this VM was charged for
    current.db(current.db.datastore.id == vm_details.datastore_id).update(used = int(vm_details.datastore_id.used) - \
        (int(vm_details.extra_HDD) + int(vm_details.template_id.hdd)))
    # updating task_queue_event entry to remove reference of VM
    current.db(current.db.task_queue_event.vm_id == vm_details.id).update(vm_id = None)
    # deleting entry of extra disk of vm
    current.db(current.db.attached_disks.vm_id == vm_details.id).delete()
    logger.debug("Database cleaned")


def vm_has_snapshots(vm_id):
    """Return True if the snapshot table has at least one row for this VM."""
    if (current.db(current.db.snapshot.vm_id == vm_id).select()):
        return True
    else:
        return False


def delete(parameters):
    """
    Delete a VM: force it off if running/suspended, undefine the libvirt
    domain (including snapshot metadata), drop any public-IP NAT mapping,
    then clean up all database rows.

    parameters: dict with key 'vm_id'.
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside delete() function")
    vm_id = parameters['vm_id']
    vm_details = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm_details)
        logger.debug(str(vm_details.status))
        if (vm_details.status == current.VM_STATUS_RUNNING or vm_details.status == current.VM_STATUS_SUSPENDED):
            logger.debug("Vm is not shutoff. Shutting it off first.")
            # destroy() is a hard power-off; required before undefine.
            domain.destroy()
        logger.debug("Starting to delete it...")
        # Also discards snapshot metadata so undefine cannot fail on a
        # domain that still has snapshots.
        domain.undefineFlags(VIR_DOMAIN_UNDEFINE_SNAPSHOTS_METADATA )
        if vm_details.public_ip:
            remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip)
        message = vm_details.vm_identity + " is deleted successfully."
        logger.debug(message)
        _clean_up_database_after_vm_deletion(vm_details)
        current.db(current.db.vm_data.id == vm_id).delete()
        current.db.commit()
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def migrate_domain_with_snapshots(vm_details, destination_host_ip, domain, domain_snapshots_list, current_snapshot_name, flags, vm_backup_during_migration):
    """
    Migrate a domain that has snapshots. libvirt cannot migrate a domain with
    snapshot metadata, so this: (1) dumps each snapshot's XML to
    vm_backup_during_migration via `virsh snapshot-dumpxml` on the source
    host, (2) deletes the snapshots, (3) migrates, (4) redefines the
    snapshots on the destination and restores the current-snapshot pointer.
    """
    # XML dump of snapshot(s) of the vm
    logger.debug("Starting to take xml dump of the snapshot(s) of the vm... ")
    if not os.path.exists(vm_backup_during_migration):
        os.makedirs(vm_backup_during_migration)
    for domain_snapshot in domain_snapshots_list:
        logger.debug("snapshot name is " + str(domain_snapshot))
        dump_xml_path = vm_backup_during_migration + '/' + 'dump_' + domain_snapshot
        snapshot_dumpxml_command = 'virsh snapshot-dumpxml %s %s > %s' % (
            vm_details.vm_identity, domain_snapshot, dump_xml_path)
        logger.debug("Taking xml dump of" + str(domain_snapshot))
        # Runs over SSH on the source host; the redirect writes the dump into
        # the shared vm_backup_during_migration directory.
        command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', snapshot_dumpxml_command)
        logger.debug(command_output)
        logger.debug("XML dump of " + str(domain_snapshot) + "succeeded.")
    # Delete snapshot(s) of the vm and migrate it to destination host
    logger.debug("Starting to delete snapshots of the vm....")
    for domain_snapshot in domain_snapshots_list:
        snapshot = domain.snapshotLookupByName(domain_snapshot, 0)
        snapshot.delete(0)
    logger.debug("Migrating the vm to destination host...")
    domain.migrateToURI("qemu+ssh://root@" + destination_host_ip + "/system", flags , None, 0)
    # Redefine all the snapshot(s) of the vm on the destination host and set current snapshot
    logger.debug("Starting to redefine all the snapshot(s) of the domain...")
    for domain_snapshot in domain_snapshots_list:
        redefine_xml_path = vm_backup_during_migration + '/' + 'dump_' + domain_snapshot
        snapshot_redefine_command = 'virsh snapshot-create --redefine %s %s ' % (vm_details.vm_identity, redefine_xml_path)
        command_output = execute_remote_cmd(destination_host_ip, 'root', snapshot_redefine_command)
        logger.debug(command_output)
    snapshot_current_command = 'virsh snapshot-current %s %s' % (vm_details.vm_identity, current_snapshot_name)
    command_output = execute_remote_cmd(destination_host_ip, 'root', snapshot_current_command)
    logger.debug(command_output)
    return


def _clean_migration_directory(vm_backup_during_migration):
    """Delete the directory holding snapshot dumpxml files, if present."""
    if os.path.exists(vm_backup_during_migration):
        shutil.rmtree(vm_backup_during_migration)
    return


def undo_migration(vm_details, domain_snapshots_list, current_snapshot_name, vm_backup_during_migration):
    """
    Roll back a failed migration: redefine the previously-dumped snapshots
    back on the SOURCE host, restore the current-snapshot pointer, then
    discard the backup directory.
    """
    if domain_snapshots_list:
        # Redefine the snapshots of the vm on the source host
        logger.debug("Starting to redefine all the snapshot(s) of the vm on the source host...")
        for domain_snapshot in domain_snapshots_list:
            redefine_xml_path = vm_backup_during_migration + '/' + 'dump_' + domain_snapshot
            snapshot_redefine_command = 'virsh snapshot-create --redefine %s %s ' % (vm_details.vm_identity, redefine_xml_path)
            command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', snapshot_redefine_command, None, True)
            logger.debug(command_output)
        snapshot_current_command = 'virsh snapshot-current %s %s' % (vm_details.vm_identity, current_snapshot_name)
        command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', snapshot_current_command, None, True)
        logger.debug(command_output)
    # Delete directory created for storing dumpxml of vm snapshots
    _clean_migration_directory(vm_backup_during_migration)
    return


def migrate_domain(vm_id, destination_host_id=None, live_migration=False):
    """
    Migrate a VM domain to another host.

    vm_id: vm_data primary key.
    destination_host_id: target host id; when None a host is picked by
        find_new_host() based on the VM's RAM/vCPU.
    live_migration: when True, tunnelled live migration flags are added.

    Snapshots known to the DB are dumped/restored around the migration;
    libvirt snapshots not present in the DB are treated as orphans and
    deleted. On any failure, undo_migration() restores the snapshots on the
    source host. Returns a (task_status, message) tuple.
    """
    vm_details = current.db.vm_data[vm_id]
    domain_snapshots_list = []
    current_snapshot_name = ''
    vm_migration_directory = get_constant('vm_migration_data')
    vm_backup_during_migration = vm_details.datastore_id.system_mount_point + '/' + vm_migration_directory + '/' + \
                                 vm_details.vm_identity
    if destination_host_id == None:
        destination_host_id = find_new_host(vm_details.RAM, vm_details.vCPU)
    destination_host_ip = current.db.host[destination_host_id].host_ip.private_ip
    flags = VIR_MIGRATE_PEER2PEER|VIR_MIGRATE_PERSIST_DEST|VIR_MIGRATE_UNDEFINE_SOURCE|VIR_MIGRATE_UNSAFE
    if live_migration:
        flags |= VIR_MIGRATE_TUNNELLED|VIR_MIGRATE_LIVE
    if vm_details.status == current.VM_STATUS_SUSPENDED:
        logger.debug("Vm is suspended")
        flags |= VIR_MIGRATE_TUNNELLED|VIR_MIGRATE_PAUSED
    elif vm_details.status == current.VM_STATUS_SHUTDOWN:
        logger.debug("Vm is shut off")
        # Offline migration moves only the definition, not live state.
        flags |= VIR_MIGRATE_OFFLINE
    logger.debug("Flags: " + str(flags))
    try:
        domain = getVirshDomain(vm_details)
        dom_snapshot_names = domain.snapshotListNames(0)
        for snapshot in current.db(current.db.snapshot.vm_id == vm_id).select():
            logger.debug("snapshot:" + str(snapshot.snapshot_name))
            domain_snapshots_list.append(snapshot.snapshot_name)
            dom_snapshot_names.remove(snapshot.snapshot_name)
        logger.debug("domain snapshot list is " + str(domain_snapshots_list))
        # Whatever remains in dom_snapshot_names exists in libvirt but not in
        # the DB — delete these orphans so migration cannot trip on them.
        for dom_snapshot in dom_snapshot_names:
            logger.debug("Deleting orphan snapshot %s" %(dom_snapshot))
            snapshot = domain.snapshotLookupByName(dom_snapshot, 0)
            snapshot.delete(0)
        if domain_snapshots_list:
            current_snapshot = domain.snapshotCurrent(0)
            current_snapshot_name = current_snapshot.getName()
            migrate_domain_with_snapshots(vm_details, destination_host_ip, domain, domain_snapshots_list, current_snapshot_name, flags, vm_backup_during_migration)
        else:
            domain.migrateToURI("qemu+ssh://root@" + destination_host_ip + "/system", flags , None, 0)
        vm_details.update_record(host_id = destination_host_id)
        current.db.commit()
        # Delete directory created for storing dumpxml of vm snapshot
        _clean_migration_directory(vm_backup_during_migration)
        message = vm_details.vm_identity + " is migrated successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        undo_migration(vm_details, domain_snapshots_list, current_snapshot_name, vm_backup_during_migration)
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def migrate_domain_datastore(vmid, destination_datastore_id, live_migration=False):
    """
    Migrate VM domain from one datastore to another.
    - Copy VM Image to new datastore (plain `cp` when cold; libvirt
      blockRebase shallow-copy with pivot when live)
    - Update VM XML definition to point at the new disk path
    - Update database (vm_data.datastore_id)

    Returns a (task_status, message) tuple.
    """
    logger.debug(sys.path)
    vm_details = current.db.vm_data[vmid]
    # datastore_id = vm_details["datastore_id"]
    logger.debug("Inside live disk migration block")
    try:
        (connection_object, domain) = getVirshDomainConn(vm_details)
        datastore = current.db.datastore[destination_datastore_id]
        vm_directory_path = datastore.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity
        logger.debug("Creating vm directory on other datastore...")
        if not os.path.exists (vm_directory_path):
            os.makedirs(vm_directory_path)
        diskpath = vm_directory_path + '/' + vm_details.vm_identity + '.qcow2'
        current_disk_path = vm_details.datastore_id.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity
        current_disk_file = current_disk_path + '/' + vm_details.vm_identity + '.qcow2'
        logger.debug(current_disk_file)
        xmlfile = domain.XMLDesc(0)
        if(live_migration==False):
            # Cold path: straight file copy of the qcow2 image.
            rc = os.system("cp %s %s" % (current_disk_file, diskpath))
            if rc != 0:
                logger.error("Copy not successful")
                raise Exception("Copy not successful")
            else:
                logger.debug("Copied successfully")
        else:
            # NOTE(review): isActive is referenced without calling it, so this
            # truthy method object always enters the branch — presumably
            # domain.isActive() was intended; confirm before relying on it.
            if domain.isActive:
                # blockRebase requires a transient domain; undefine first and
                # redefine with the updated XML afterwards.
                domain.undefine()
                root = etree.fromstring(xmlfile)
                target_elem = root.find("devices/disk/target")
                target_disk = target_elem.get('dev')
                # # destxml = generate_blockcopy_xml(diskpath,target_disk)
                flag = VIR_DOMAIN_BLOCK_REBASE_SHALLOW | VIR_DOMAIN_BLOCK_REBASE_COPY
                domain.blockRebase(target_disk, diskpath, 0, flag)
                # Poll until the copy job reports cur == end, then pivot the
                # domain onto the new image.
                block_info_list = domain.blockJobInfo(current_disk_file,0)
                while(block_info_list['end'] != block_info_list['cur']):
                    logger.debug("time to sleep")
                    time.sleep(60)
                    block_info_list = domain.blockJobInfo(current_disk_file,0)
                domain.blockJobAbort(current_disk_file, VIR_DOMAIN_BLOCK_JOB_ABORT_PIVOT)
                source_elem = root.find("devices/disk/source")
                source_elem.set('file',diskpath)
                newxml_file = etree.tostring(root)
                domain = connection_object.defineXML(newxml_file)
        vm_details.update_record(datastore_id=destination_datastore_id)
        # Only remove the old image once the new one is confirmed in place.
        if os.path.exists (diskpath):
            os.remove(current_disk_file)
            restore_symboltable_path = current_disk_path+"/restore_symboltable"
            if os.path.exists (restore_symboltable_path):
                logger.debug(restore_symboltable_path)
                os.remove(restore_symboltable_path)
            os.rmdir(current_disk_path)
        connection_object.close()
        message = vm_details.vm_identity + " is migrated successfully to new datastore."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        #undo_datastore_migration(vm_details, domain, diskpath, current_disk_file, vm_directory_path, datastore_id)
        connection_object.close()
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def undo_datastore_migration(vm_details, domain, diskpath, current_disk_file, vm_directory_path, datastore_id):
    """
    Roll back a failed datastore migration: restore vm_data.datastore_id,
    wait out / abort any in-flight block-copy job, and delete the partially
    copied image and its directory on the destination datastore.
    """
    # undo database changes
    vm_details.update_record(datastore_id=datastore_id)
    # NOTE(review): isActive is not called here either (see
    # migrate_domain_datastore) — the method object is always truthy.
    if domain.isActive:
        logger.debug("domain is active")
        block_info_list = domain.blockJobInfo(current_disk_file,0)
        if(bool(block_info_list) == True):
            # Let the running copy job finish, then abort it (no pivot, so
            # the domain stays on the original image).
            while(block_info_list['end'] != block_info_list['cur']):
                logger.debug("time to sleep")
                time.sleep(60)
                block_info_list = domain.blockJobInfo(current_disk_file,0)
            if(block_info_list['end'] == block_info_list['cur']):
                domain.blockJobAbort(current_disk_file)
                block_info_list = domain.blockJobInfo(current_disk_file,0)
    if os.path.exists (diskpath):
        os.remove(diskpath)
        os.rmdir(vm_directory_path)


def migrate(parameters):
    """
    Task entry point: migrate a VM to a new host.

    parameters: dict with 'vm_id', 'destination_host', and 'live_migration'
    (the checkbox value 'on' enables live migration).
    Returns migrate_domain()'s (task_status, message) tuple.
    """
    vmid = parameters['vm_id']
    logger.debug("Inside migrate() function for vm_id: "+str(vmid))
    destination_host_id = parameters['destination_host']
    if parameters['live_migration'] == 'on':
        live_migration = True
    else:
        live_migration = False
    return migrate_domain(vmid, destination_host_id, live_migration)


def migrate_datastore(parameters):
    """
    Task entry point: migrate a VM's disk to a new datastore.

    parameters: dict with 'vm_id', 'destination_ds', and 'live_migration'
    ('on' enables the live block-copy path).
    Returns migrate_domain_datastore()'s (task_status, message) tuple.
    """
    logger.debug("Inside migrate_datastore() function")
    vmid = parameters['vm_id']
    destination_ds_id = parameters['destination_ds']
    if parameters['live_migration'] == 'on':
        live_migration = True
    else:
        live_migration = False
    return migrate_domain_datastore(vmid, destination_ds_id, live_migration)


def snapshot(parameters):
    """
    Take a snapshot of a VM.

    parameters: dict with 'vm_id' and 'snapshot_type'. For non-user snapshot
    types (cron-driven Daily/Monthly/Yearly), any existing snapshot of the
    same type is deleted first so only the latest is kept. The VM must
    answer ping before snapshotting. Returns a (task_status, message) tuple.
    """
    logger.debug("Inside snapshot() function")
    vm_id = parameters['vm_id']
    snapshot_type = parameters['snapshot_type']
    try:
        vm_details = current.db.vm_data[vm_id]
        if is_pingable(str(vm_details.private_ip.private_ip)):
            logger.debug("VM is pingable. Starting to start with snapshotting...")
            if snapshot_type != current.SNAPSHOT_USER:
                snapshots = current.db((current.db.snapshot.vm_id == vm_id) & (current.db.snapshot.type == snapshot_type)).select()
                #Delete the existing Daily/Monthly/Yearly snapshot
                for snapshot_cron in snapshots:
                    logger.debug(snapshot_cron)
                    delete_snapshot({'vm_id':vm_id, 'snapshot_id':snapshot_cron.id})
            # Human-readable timestamp doubles as the snapshot name.
            snapshot_name = get_datetime().strftime("%I:%M%p_%B%d,%Y")
            domain = getVirshDomain(vm_details)
            xmlDesc = "<domainsnapshot><name>%s</name></domainsnapshot>" % (snapshot_name)
            domain.snapshotCreateXML(xmlDesc, 0)
            message = "Snapshotted successfully."
            current.db.snapshot.insert(vm_id = vm_id, datastore_id = vm_details.datastore_id, snapshot_name = snapshot_name, type = snapshot_type)
            logger.debug("Task Status: SUCCESS Message: %s " % message)
            return (current.TASK_QUEUE_STATUS_SUCCESS, message)
        else:
            message = "Unable to ping VM before snapshoting: %s" % (vm_details.private_ip.private_ip)
            raise Exception("Unable to ping VM before snapshoting: %s" % (vm_details.private_ip.private_ip))
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def revert(parameters):
    """
    Revert a VM to a named snapshot.

    parameters: dict with 'vm_id' and 'snapshot_id' (snapshot table pk).
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside revert snapshot() function")
    vm_id = parameters['vm_id']
    snapshotid = parameters['snapshot_id']
    vm_details = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm_details)
        snapshot_name = current.db(current.db.snapshot.id == snapshotid).select().first()['snapshot_name']
        snapshot = domain.snapshotLookupByName(snapshot_name, 0)
        domain.revertToSnapshot(snapshot, 0)
        message = "Reverted to snapshot successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def delete_snapshot(parameters):
    """
    Delete a VM snapshot, tolerating snapshots already missing from libvirt
    (the DB row is removed either way).

    parameters: dict with 'vm_id' and 'snapshot_id'.
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside delete snapshot() function")
    vm_id = parameters['vm_id']
    snapshotid = parameters['snapshot_id']
    vm_details = current.db.vm_data[vm_id]
    logger.debug(str(vm_details))
    try:
        domain = getVirshDomain(vm_details)
        snapshot_name = current.db(current.db.snapshot.id == snapshotid).select().first()['snapshot_name']
        snapshot = None
        try:
            # Snapshot may exist only in the DB (e.g. after a migration that
            # dropped it); treat a libvirt lookup failure as "nothing to do".
            snapshot = domain.snapshotLookupByName(snapshot_name, 0)
        except libvirtError:
            logger.debug("Snapshot %s not found" %(snapshot_name))
        if snapshot != None:
            snapshot.delete(0)
        message = "Deleted snapshot successfully."
        logger.debug(message)
        current.db(current.db.snapshot.id == snapshotid).delete()
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def update_security_domain(vm_details, security_domain_id, xmlDesc=None):
    """
    Get new IP for given security domain.
    Update the VM XML with new mac_address and update the information in DB.

    xmlDesc: current domain XML string (from domain.XMLDesc(0)).
    Returns the modified domain XML string, ready for defineXML().
    """
    # fetch new private IP from db from given security domain
    private_ip_info = _get_private_ip_mac(security_domain_id)
    # update vm config to add new mac address.
    root = etree.fromstring(xmlDesc)
    mac_elem = root.find("devices/interface[@type='bridge']/mac")
    mac_elem.set('address', private_ip_info.mac_addr)
    vlan_tag_elem = root.find("devices/interface[@type='bridge']/vlan/tag")
    vlan_tag_elem.set('id', private_ip_info.vlan.vlan_tag)
    # update NAT IP mapping, if public IP present: re-point the public IP at
    # the freshly-assigned private IP
    if vm_details.public_ip:
        remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip)
        create_mapping(vm_details.public_ip.public_ip, private_ip_info.private_ip)
    # update vm_data
    current.db(current.db.vm_data.id == vm_details.id).update(security_domain = security_domain_id, private_ip = private_ip_info.id)
    return etree.tostring(root)


def edit_vm_config(parameters):
    """
    Edit a VM's configuration. Applies whichever of the optional keys are
    present in parameters:
      'vcpus'           — new vCPU count (persisted config, max + current)
      'ram'             — new RAM in MB (converted to KiB for libvirt)
      'public_ip'       — truthy to attach a random public IP, falsy to
                          release the existing one
      'security_domain' — move the VM to another security domain (new
                          private IP/MAC/VLAN; reboots the VM if active)
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside edit vm config() function")
    vm_id = parameters['vm_id']
    vm_details = current.db.vm_data[vm_id]
    message = ""
    try:
        connection_object, domain = getVirshDomainConn(vm_details)
        if 'vcpus' in parameters:
            new_vcpus = int(parameters['vcpus'])
            # Raise the maximum first; otherwise setting a count above the
            # old maximum would fail.
            domain.setVcpusFlags(new_vcpus, VIR_DOMAIN_VCPU_MAXIMUM)
            domain.setVcpusFlags(new_vcpus, VIR_DOMAIN_AFFECT_CONFIG)
            message += "Edited vCPU successfully."
            current.db(current.db.vm_data.id == vm_id).update(vCPU = new_vcpus)
        if 'ram' in parameters:
            # libvirt expects KiB; the DB stores MB.
            new_ram = int(parameters['ram']) * 1024
            logger.debug(str(new_ram))
            domain.setMemoryFlags(new_ram, VIR_DOMAIN_MEM_MAXIMUM)
            domain.setMemoryFlags(new_ram, VIR_DOMAIN_AFFECT_CONFIG)
            message += " And edited RAM successfully."
            current.db(current.db.vm_data.id == vm_id).update(RAM = int(parameters['ram']))
        if 'public_ip' in parameters:
            enable_public_ip = parameters['public_ip']
            if enable_public_ip:
                public_ip_pool = _choose_random_public_ip()
                if public_ip_pool:
                    create_mapping(public_ip_pool.public_ip, vm_details.private_ip.private_ip)
                    current.db.vm_data[vm_id] = dict(public_ip=public_ip_pool.id)
                    message += "Edited Public IP successfully."
                else:
                    raise Exception("Available Public IPs are exhausted.")
            else:
                remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip)
                current.db.vm_data[vm_id] = dict(public_ip = None)
        if 'security_domain' in parameters:
            logger.debug('Updating security domain')
            xmlfile = update_security_domain(vm_details, parameters['security_domain'], domain.XMLDesc(0))
            domain = connection_object.defineXML(xmlfile)
            # Reboot so the guest picks up the new MAC/VLAN via DHCP.
            if domain.isActive():
                domain.reboot(0)
            message += "Edited security domain successfully"
        connection_object.close()
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def _get_clone_properties(vm_details, cloned_vm_details, vm_properties):
    """
    Get properties for Cloned VM.

    Fills vm_properties in place (datastore, security domain, MAC/IP/VNC
    port, template, host) and prepares the clone's image directories.
    Returns the `--file ...` argument string for virt-clone covering the
    primary image plus one entry per extra disk of the parent.
    Raises if a directory with the clone's identity already exists.
    """
    datastore = _choose_datastore()
    vm_properties['datastore'] = datastore
    logger.debug("Datastore selected is: " + str(datastore))
    vm_properties['security_domain'] = vm_details.security_domain
    vm_properties['public_ip_req'] = False
    # Finds mac address, ip address and vnc port for the cloned vm
    _choose_mac_ip_vncport(vm_properties)
    logger.debug("MAC is : " + str(vm_properties['mac_addr']) + " IP is : " + str(vm_properties['private_ip']) + \
                 " VNCPORT is : " + str(vm_properties['vnc_port']))
    # Template and host of parent vm
    vm_properties['template'] = current.db(current.db.template.id == vm_details.template_id).select()[0]
    vm_properties['vm_host_details'] = current.db.host[vm_details.host_id]
    vm_properties['host'] = vm_properties['vm_host_details'].id
    # Creates a directory for the cloned vm
    logger.debug("Creating directory for cloned vm...")
    cloned_vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + cloned_vm_details.vm_identity
    if not os.path.exists (cloned_vm_directory_path):
        os.makedirs(cloned_vm_directory_path)
        clone_file_parameters = ' --file ' + cloned_vm_directory_path + '/' + cloned_vm_details.vm_identity + '.qcow2'
    else:
        raise Exception("Directory with same name as vmname already exists.")
    # Creates a folder for additional disks of the cloned vm
    vm = current.db(current.db.vm_data.vm_identity == vm_details.vm_identity).select().first()
    disk_details_of_cloning_vm = current.db(current.db.attached_disks.vm_id == vm.id).select(orderby=current.db.attached_disks.attached_disk_name)
    logger.debug(disk_details_of_cloning_vm)
    already_attached_disks = len(disk_details_of_cloning_vm)
    cloned_vm_extra_disks_directory = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                      datastore.ds_name + '/' + cloned_vm_details.vm_identity
    if already_attached_disks > 0:
        if not os.path.exists (cloned_vm_extra_disks_directory):
            logger.debug("Making Directory")
            os.makedirs(cloned_vm_extra_disks_directory)
        # Mirror each of the parent's extra disks: add a --file argument and
        # a matching attached_disks row for the clone.
        count = already_attached_disks
        while already_attached_disks > 0:
            disk_name = cloned_vm_details.vm_identity + '_disk' + str(count - already_attached_disks + 1) + '.qcow2'
            clone_file_parameters += ' --file ' + cloned_vm_extra_disks_directory + '/' + disk_name
            current.db.attached_disks.insert(vm_id = cloned_vm_details.id, datastore_id = datastore.id , attached_disk_name = disk_name, capacity = disk_details_of_cloning_vm[count - already_attached_disks].capacity)
            already_attached_disks -= 1
    return (clone_file_parameters)


def migrate_clone_to_new_host(vm_details, cloned_vm_details, new_host_id_for_cloned_vm,vm_properties):
    """
    Offline-migrate a freshly cloned (shut-off) VM to its selected host.
    Updates cloned_vm_details.host_id and vm_properties['host'] on success.
    Returns True on success, False on a libvirt error.
    """
    try:
        new_host_ip_for_cloned_vm = current.db.host[new_host_id_for_cloned_vm].host_ip.private_ip
        logger.debug("New host ip for cloned vm is: " + str(new_host_ip_for_cloned_vm))
        # Clone is shut off, so offline migration (definition only).
        flags = VIR_MIGRATE_PEER2PEER|VIR_MIGRATE_PERSIST_DEST|VIR_MIGRATE_UNDEFINE_SOURCE|VIR_MIGRATE_OFFLINE|VIR_MIGRATE_UNSAFE
        logger.debug("Clone currently on: " + str(vm_details.host_id.host_ip))
        (current_host_connection_object, domain) = getVirshDomainConn(None, vm_details.host_id.host_ip, cloned_vm_details.vm_identity)
        logger.debug("Starting to migrate cloned vm to host " + str(new_host_ip_for_cloned_vm))
        domain.migrateToURI("qemu+ssh://root@" + new_host_ip_for_cloned_vm + "/system", flags , None, 0)
        current_host_connection_object.close()
        logger.debug("Successfully migrated cloned vm to host " + str(new_host_ip_for_cloned_vm))
        cloned_vm_details.update_record(host_id = new_host_id_for_cloned_vm)
        vm_properties['host'] = new_host_id_for_cloned_vm
        return True
    except libvirt.libvirtError,e:
        message = e.get_error_message()
        logger.debug("Error: " + message)
        return False


def clone(vmid):
    """
    Clone a VM.

    vmid: vm_data pk of the CLONE row; its parent_id points at the source
    VM, which must be shut off. Checks that the parent's host can fit the
    clone under 200% RAM/CPU overcommitment, runs virt-clone remotely, then
    tries to move the clone to a host chosen by find_new_host(). On failure
    the reserved properties are released via _free_vm_properties().
    Returns a (task_status, message) tuple.
    """
    vm_properties = {}
    logger.debug("Inside clone() function")
    cloned_vm_details = current.db.vm_data[vmid]
    vm_details = current.db(current.db.vm_data.id == cloned_vm_details.parent_id).select().first()
    try:
        domain = getVirshDomain(vm_details)
        if domain.info()[0] != VIR_DOMAIN_SHUTOFF:
            raise Exception("VM is not shutoff. Check vm status.")
        clone_file_parameters = _get_clone_properties(vm_details, cloned_vm_details, vm_properties)
        logger.debug("cloned vm properties after clone_file_parameters" + str(vm_properties))
        host = vm_properties['vm_host_details']
        logger.debug("host is: " + str(host))
        logger.debug("host details are: " + str(host))
        (used_ram, used_cpu) = host_resources_used(host.id)
        logger.debug("uram: " + str(used_ram) + " used_cpu: " + str(used_cpu) + " host ram: " + str(host.RAM) +" host cpu: " + str(host.CPUs))
        # Capacity check at 200% overcommitment; host RAM is in GB, VM RAM in
        # MB, hence the *1024.
        host_ram_after_200_percent_overcommitment = math.floor((host.RAM * 1024) * 2)
        host_cpu_after_200_percent_overcommitment = math.floor(host.CPUs * 2)
        logger.debug("host_ram_after_200_percent_overcommitment in MB " + str(host_ram_after_200_percent_overcommitment))
        logger.debug("host_cpu_after_200_percent_overcommitment " + str(host_cpu_after_200_percent_overcommitment))
        logger.debug("Available RAM on host: %s, Requested RAM: %s" % ((host_ram_after_200_percent_overcommitment - used_ram), vm_details.RAM))
        logger.debug("Available CPUs on host: %s, Requested CPU: %s " % ((host_cpu_after_200_percent_overcommitment - used_cpu), vm_details.vCPU))
        if((( host_ram_after_200_percent_overcommitment - used_ram) >= vm_details.RAM) and ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vm_details.vCPU) and (vm_details.vCPU <= host.CPUs)):
            clone_command = "virt-clone --original " + vm_details.vm_identity + " --name " + cloned_vm_details.vm_identity + \
                            clone_file_parameters + " --mac " + vm_properties['mac_addr']
            command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', clone_command, None, True)
            logger.debug(command_output)
            logger.debug("Updating db after cloning")
            update_db_after_vm_installation(cloned_vm_details, vm_properties, parent_id = vm_details.id)
            message = "Cloned successfully. "
            # Best effort: try to rebalance the clone onto a new host; a
            # failure here does not fail the clone itself.
            try:
                new_host_id_for_cloned_vm = find_new_host(cloned_vm_details.RAM, cloned_vm_details.vCPU)
                if new_host_id_for_cloned_vm != host.id:
                    if migrate_clone_to_new_host(vm_details, cloned_vm_details, new_host_id_for_cloned_vm,vm_properties):
                        message += "Found new host and migrated successfully."
                    else:
                        message += "Found new host but not migrated successfully."
                else:
                    message += "New host selected to migrate cloned vm is same as the host on which it currently resides."
            except:
                message += "Could not find host to migrate cloned vm."
            logger.debug("Task Status: SUCCESS Message: %s " % message)
            return (current.TASK_QUEUE_STATUS_SUCCESS, message)
        else:
            raise Exception("Host resources exhausted. Migrate the host vms and then try.")
    except:
        _free_vm_properties(cloned_vm_details, vm_properties)
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def attach_extra_disk(parameters):
    """
    Attach an extra disk to a VM and add its size to vm_data.extra_HDD.

    parameters: dict with 'vm_id' and 'disk_size'.
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside attach extra disk() function")
    vmid = parameters['vm_id']
    disk_size = parameters['disk_size']
    vm_details = current.db.vm_data[vmid]
    logger.debug(str(vm_details))
    try:
        if (serve_extra_disk_request(vm_details, disk_size, vm_details.host_id.host_ip.private_ip)):
            current.db(current.db.vm_data.id == vmid).update(extra_HDD = vm_details.extra_HDD + disk_size)
            message = "Attached extra disk successfully"
            logger.debug(message)
            return (current.TASK_QUEUE_STATUS_SUCCESS, message)
        else:
            message = " Your request for additional HDD could not be completed at this moment. Check logs."
            logger.debug("Task Status: SUCCESS Message: %s " % message)
            return (current.TASK_QUEUE_STATUS_FAILED, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def get_vm_image_location(datastore_id, vm_identity):
    """
    Get the file path for the qcow2 image of a VM.
    Returns (vm_image_name, image_present) where image_present reflects
    os.path.exists on this machine's mount of the datastore.
    """
    datastore = current.db.datastore[datastore_id]
    vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + vm_identity
    vm_image_name = vm_directory_path + '/' + vm_identity + '.qcow2'
    image_present = True if os.path.exists(vm_image_name) else False
    return (vm_image_name, image_present)


def get_extra_disk_location(datastore_id, vm_identity, disk_name, get_disk_size=False):
    """
    Get the file path for the qcow2 image of the extra disk.
    Returns (disk_image_path, image_present, disk_size_in_GB); disk_size is
    parsed from `qemu-img info` only when get_disk_size is requested and the
    image exists, otherwise 0. Returns (None, False, 0) for an unknown
    datastore.
    """
    datastore = current.db.datastore[datastore_id]
    if datastore:
        vm_extra_disks_directory_path = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                        datastore.ds_name + '/' + vm_identity
        ext = '' if disk_name.endswith('.qcow2') else '.qcow2'
        disk_image_path = vm_extra_disks_directory_path + '/' + disk_name + ext
        image_present = True if os.path.exists(disk_image_path) else False
        disk_size = 0
        # NOTE(review): `&` is bitwise, relying on both operands being bools;
        # `and` was presumably intended (same result for bool operands).
        if image_present & get_disk_size:
            command = "qemu-img info " + disk_image_path + " | grep 'virtual size'"
            ret = os.popen(command).read()
            # Returns e.g. virtual size: 40G (42949672960 bytes)
            disk_size = int(ret[ret.index(':')+1:ret.index('G ')].strip())
        return (disk_image_path, image_present, disk_size)
    else:
        return (None, False, 0)


def launch_existing_vm_image(vm_details):
    """
    Launch existing VM image
    - Choose new private_ip & mac_addr if not provided
    - Get location for VM image
    - Launch VM on given host
    - Attach extra disk to VM if defined
    - Create mapping between public IP and private IP if required
    """
    logger.debug('Launch existing VM image')
    vm_properties = {}
    vm_properties['ram'] = vm_details.RAM
    vm_properties['vcpus'] = vm_details.vCPU
    vm_properties['security_domain'] = vm_details.security_domain
    #If Private IP was already chosen previously and DHCP entry is done
    if vm_details.private_ip != None:
        private_ip_info = current.db.private_ip_pool[vm_details.private_ip]
        if private_ip_info:
            vm_properties['private_ip'] = private_ip_info.private_ip
            vm_properties['mac_addr'] = private_ip_info.mac_addr
            vm_properties['vlan_name'] = private_ip_info.vlan.name
            vm_properties['vlan_tag'] = private_ip_info.vlan.vlan_tag
    if vm_details.public_ip == None:
        vm_properties['public_ip_req'] = False
    else:
        vm_properties['public_ip_req'] = True
        if vm_details.public_ip.is_active:
            vm_properties['public_ip'] = vm_details.public_ip.public_ip
    # Fills in any of mac/ip/vnc not set above.
    _choose_mac_ip_vncport(vm_properties)
    vm_properties['template'] = current.db.template[vm_details.template_id]
    vm_properties['datastore'] = current.db.datastore[vm_details.datastore_id]
    vm_properties['host'] = find_new_host(vm_details.RAM, vm_details.vCPU)
    (vm_image_name, image_present) = get_vm_image_location(vm_details.datastore_id, vm_details.vm_identity)
    if image_present:
        launch_vm_on_host(vm_details, vm_image_name, vm_properties)
        #Check if extra disk needs to be attached
        attached_disks = current.db((current.db.attached_disks.vm_id == vm_details.id)).select()
        if attached_disks:
            #Extra disk to be attached to the VM
            host_ip = current.db.host[vm_properties['host']].host_ip.private_ip
            disk_counter = 1
            for attached_disk in attached_disks:
                disk_size = attach_disk(vm_details, attached_disk.attached_disk_name, host_ip, disk_counter, True)
                # NOTE(review): `and` between two DAL queries keeps only the
                # second condition; `&` was presumably intended — confirm.
                current.db(current.db.attached_disks.vm_id == attached_disk.vm_id and current.db.attached_disks.attached_disk_name==attached_disk.attached_disk_name ).update(capacity = disk_size)
                vm_details.extra_HDD += disk_size
                disk_counter += 1
        #Create mapping of Private_IP and Public_IP
        if vm_properties['public_ip_req']:
            create_mapping(vm_properties['public_ip'], vm_properties['private_ip'])
        update_db_after_vm_installation(vm_details, vm_properties)


def save_vm_as_template(parameters):
    """
    Save VM as template.
    If template for given VM already exists, replace with new template;
    otherwise insert a new template row owned by the VM's mapped users and
    record it in vm_data.saved_template.
    Returns a (task_status, message) tuple.
    """
    logger.debug("Inside save_as_template() function")
    vm_id = parameters['vm_id']
    vm_data = current.db.vm_data[vm_id]
    user_list = []
    vm_details = current.db.vm_data[vm_id]
    logger.debug(str(vm_details))
    try:
        (is_templated_created, new_template, old_template) = create_new_template(vm_details)
        if (is_templated_created):
            #remove old template
            if os.path.exists (old_template):
                os.remove(old_template)
            else:
                # First-time save: collect the VM's users as template owners
                # and insert the template record.
                for user in current.db(current.db.user_vm_map.vm_id == vm_id).select(current.db.user_vm_map.user_id):
                    user_list.append(user.user_id)
                new_template_id = current.db.template.insert(name = vm_data.vm_name + "_template" , os = vm_data.template_id.os , os_name = vm_data.template_id.os_name , os_version = vm_data.template_id.os_version , os_type = vm_data.template_id.os_type , arch = vm_data.template_id.arch , hdd = vm_data.template_id.hdd , hdfile = new_template , type = vm_data.template_id.type , tag = vm_data.vm_name + "_template" , datastore_id = vm_data.template_id.datastore_id, owner = user_list)
                current.db.vm_data[vm_id] = dict(saved_template = new_template_id)
            message = "User Template saved successfully"
            logger.debug(message)
            return (current.TASK_QUEUE_STATUS_SUCCESS, message)
        else:
            message = " Vm Template not saved "
            logger.debug("Task Status: %s " % message)
            return (current.TASK_QUEUE_STATUS_FAILED, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def delete_template(parameters):
    """
    Delete a template: remove its image file, clear saved_template on the
    VM that produced it (if any), and delete the template row.
    Returns (TASK_QUEUE_STATUS_SUCCESS, "").
    """
    logger.debug("Inside delete_template() function")
    template_id = parameters['template_id']
    template_details = current.db.template[template_id]
    template_path = template_details["hdfile"]
    if os.path.exists(template_path):
        os.remove(template_path)
    # set value in db also
    parent_vm = current.db.vm_data(saved_template = template_id)
    if parent_vm:
        parent_vm.update_record(saved_template = None)
    del current.db.template[template_id]
    return (current.TASK_QUEUE_STATUS_SUCCESS, "")


def create_new_template(vm_details):
    """
    Create a new template from the VM image
    - Create template directory
    - Copy VM Image to directory (live blockRebase copy if VM is running,
      remote `cp` if it is shut down)
    - Update database to define new template

    Returns (created_ok, template_location, old_template_path); an existing
    template file is first moved aside to *_template_old.qcow2 so the caller
    can remove it on success.
    """
    try:
        (connection_object, domain) = getVirshDomainConn(vm_details)
        xmlfile = domain.XMLDesc(0)
        logger.debug("connection object created")
        datastore = _choose_datastore()
        logger.debug(datastore)
        new_template_dir = datastore.system_mount_point + '/' +get_constant('templates_dir') + '/' + vm_details.requester_id.first_name
        logger.debug("Creating user template directory...")
        if not os.path.exists (new_template_dir):
            os.makedirs(new_template_dir)
        template = new_template_dir + '/' + vm_details.vm_identity + '_template.qcow2'
        template_location = '/' + vm_details.requester_id.first_name + '/' + vm_details.vm_identity + '_template.qcow2'
        old_template = new_template_dir + '/' + vm_details.vm_identity + '_template_old.qcow2'
        if os.path.exists (template):
            # move template to some other path so the new copy can take its
            # place; caller deletes old_template once the save succeeds
            logger.debug("move template to some other file")
            shutil.move(template, old_template)
        logger.debug("template " + template)
        current_disk_path = vm_details.datastore_id.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity
        current_disk_file = current_disk_path + '/' + vm_details.vm_identity + '.qcow2'
        if (vm_details.status == current.VM_STATUS_RUNNING or vm_details.status == current.VM_STATUS_SUSPENDED):
            logger.debug("vm is active in db")
            if domain.isActive():
                # Live copy: undefine (blockRebase needs a transient domain),
                # shallow-copy the disk to the template path, poll to
                # completion, abort the job, then redefine the original XML.
                domain.undefine()
                root = etree.fromstring(xmlfile)
                target_elem = root.find("devices/disk/target")
                target_disk = target_elem.get('dev')
                flag = VIR_DOMAIN_BLOCK_REBASE_SHALLOW | VIR_DOMAIN_BLOCK_REBASE_COPY
                domain.blockRebase(target_disk, template, 0, flag)
                block_info_list = domain.blockJobInfo(current_disk_file,0)
                while(block_info_list['end'] != block_info_list['cur']):
                    logger.debug("time to sleep")
                    time.sleep(60)
                    block_info_list = domain.blockJobInfo(current_disk_file,0)
                domain.blockJobAbort(current_disk_file)
                domain = connection_object.defineXML(xmlfile)
                connection_object.close()
                return (True, template_location, old_template)
            else:
                logger.debug("domain is not running on host")
                return (False, template_location, old_template)
        elif(vm_details.status == current.VM_STATUS_SHUTDOWN):
            if domain.isActive():
                logger.debug("Domain is still active...Please try again after some time!!!")
                return (False, template_location, old_template)
            else:
                # Cold copy on the VM's host over SSH.
                logger.debug("copying")
                copy_command = "cp "+current_disk_file+" "+template
                logger.debug("copy_command"+copy_command)
                #rc = os.system("cp %s %s" % (current_disk_file, template))
                logger.debug("copy command running on " + vm_details.host_id.host_ip.private_ip + " host")
                command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', copy_command)
                logger.debug(command_output)
                return (True, template_location, old_template)
    except:
        # Re-define the domain if the live-copy path left it transient.
        if not domain.isPersistent():
            domain = connection_object.defineXML(xmlfile)
        connection_object.close()
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (False, template_location, old_template)
# -*- coding: utf-8 -*-
###################################################################################
from gluon import current
from helper import get_constant, execute_remote_cmd, config, get_datetime, \
    log_exception, is_pingable, get_context_path
from libvirt import *  # @UnusedWildImport
from log_handler import logger
from nat_mapper import create_mapping, remove_mapping
import math, shutil, libvirt, os, time, random
import xml.etree.ElementTree as etree


def _choose_datastore():
    """
    Choose a datastore for new storage allocation.

    Returns the datastore row with the largest available space
    (capacity - used).  Raises Exception when no datastore is configured.
    """
    datastores = current.db(current.db.datastore.id >= 0).select()
    logger.debug("datastore_length" + str(len(datastores)))
    if len(datastores) == 0:
        raise Exception("No datastore found.")
    # Fix: the original built a dict of availabilities, sorted it and took the
    # last element just to find the maximum; max() with a key does the same in
    # one pass (tie-breaking among equally-free datastores is arbitrary either way).
    selected = max(datastores, key=lambda ds: (ds.capacity - ds.used))
    logger.debug("selected datastore" + str(selected))
    return selected


def host_resources_used(host_id):
    """
    Return (RAM, CPU) currently allocated to VMs on the given host.

    RAM is in MB, CPU a vCPU count; both are summed over every VM on the
    host whose status is neither UNKNOWN nor IN_QUEUE, then rounded up.
    """
    RAM = 0.0
    CPU = 0.0
    vms = current.db((current.db.vm_data.host_id == host_id) &
                     (current.db.vm_data.status != current.VM_STATUS_UNKNOWN) &
                     (current.db.vm_data.status != current.VM_STATUS_IN_QUEUE)).select()
    logger.debug("vms selected are: " + str(vms))
    for vm_data in vms:
        RAM += vm_data.RAM
        CPU += vm_data.vCPU
    return (math.ceil(RAM), math.ceil(CPU))


def getVirshDomainConn(vm_details, host_ip=None, domain_name=None):
    """
    Open a libvirt connection to the VM's host and look up its domain.

    When vm_details is None, host_ip and domain_name must be supplied
    explicitly.  Returns (connection, domain); the caller is responsible
    for closing the connection.
    """
    if vm_details != None:
        host_ip = vm_details.host_id.host_ip.private_ip
        domain_name = vm_details.vm_identity
    connection_object = libvirt.open("qemu+ssh://root@" + host_ip + "/system")
    domain = connection_object.lookupByName(domain_name)
    return (connection_object, domain)


def getVirshDomain(vm_details):
    """
    Return the libvirt domain object for the VM.

    NOTE(review): the connection that produced the domain is closed before
    returning; callers rely on libvirt keeping the domain usable afterwards.
    """
    (connection_object, domain) = getVirshDomainConn(vm_details)
    connection_object.close()
    return domain


def _set_portgroup_in_vm(domain_name, portgroup, host_ip, vlan_tag):
    """
    Embed the VLAN tag / portgroup in the VM's network interface definition
    so the VM fetches an IP of its VLAN from DHCP.  The domain is redefined
    and restarted for the change to take effect.
    """
    (connection_object, domain) = getVirshDomainConn(None, host_ip, domain_name)
    xml = etree.fromstring(domain.XMLDesc(0))
    source_network_element = xml.find('.//interface/source')
    source_network_string = etree.tostring(source_network_element)
    logger.debug("Source network is " + source_network_string)
    if source_network_string.find(" bridge=") != -1:
        # Bridged interface: add <vlan><tag id=...> under <interface>.
        logger.debug("Source is set to bridge adding <vlan><tag_id> to the interface tag ")
        root_new = xml.find('.//interface')
        root_new_vlan = etree.SubElement(root_new, 'vlan')
        root_new_tag = etree.SubElement(root_new_vlan, 'tag')
        root_new_tag.set('id', vlan_tag)
        logger.debug("After append root_new_vlan is " + etree.tostring(root_new_vlan))
    elif source_network_string.find(" network=") != -1:
        # Virtual-network interface: set the portgroup attribute on <source>.
        logger.debug("Source is set to network adding portgroup to the source tag ")
        source_network_element.set('portgroup', portgroup)
        logger.debug("Changed source network is " + etree.tostring(source_network_element))
    else:
        logger.debug("Neither VM nor vlan tagId is added in the xml")
    domain = connection_object.defineXML(etree.tostring(xml))
    # Restart the domain so the redefined interface is picked up.
    domain.destroy()
    domain.create()
    domain.isActive()
    connection_object.close()


def _get_private_ip_mac(security_domain_id):
    """
    Choose a random free private IP belonging to a VLAN of the given
    security domain.

    The IP must not be assigned to any VM or to any host.  Returns the
    private_ip_pool row; raises Exception when the pool is exhausted.
    """
    vlans = current.db(current.db.security_domain.id == security_domain_id)._select(current.db.security_domain.vlan)
    private_ip_pool = current.db((~current.db.private_ip_pool.id.belongs(
                                      current.db(current.db.vm_data.private_ip != None)._select(current.db.vm_data.private_ip))) &
                                 (~current.db.private_ip_pool.id.belongs(
                                      current.db(current.db.host.host_ip != None)._select(current.db.host.host_ip))) &
                                 (current.db.private_ip_pool.vlan.belongs(vlans))).select(
                                      current.db.private_ip_pool.ALL, orderby='<random>').first()
    if private_ip_pool:
        return private_ip_pool
    else:
        sd = current.db.security_domain[security_domain_id]
        raise Exception(("Available MACs are exhausted for security domain '%s'." % sd.name))
security domain """ vlans = current.db(current.db.security_domain.id == security_domain_id)._select(current.db.security_domain.vlan) private_ip_pool = current.db((~current.db.private_ip_pool.id.belongs(current.db(current.db.vm_data.private_ip != None)._select(current.db.vm_data.private_ip))) & (~current.db.private_ip_pool.id.belongs(current.db(current.db.host.host_ip != None)._select(current.db.host.host_ip))) & (current.db.private_ip_pool.vlan.belongs(vlans))).select(current.db.private_ip_pool.ALL, orderby='<random>').first() if private_ip_pool: return private_ip_pool else: sd = current.db.security_domain[security_domain_id] raise Exception(("Available MACs are exhausted for security domain '%s'." % sd.name)) def _choose_random_public_ip(): """ Chooses a random Public IP from the pool, such that: - It is not assigned to any VM - It is not assigned to any host - IP is marked active. """ public_ip_pool = current.db((~current.db.public_ip_pool.id.belongs(current.db(current.db.vm_data.public_ip != None)._select(current.db.vm_data.public_ip))) & (~current.db.public_ip_pool.id.belongs(current.db(current.db.host.public_ip != None)._select(current.db.host.public_ip))) & (current.db.public_ip_pool.is_active == True)) \ .select(current.db.public_ip_pool.ALL, orderby='<random>').first() return public_ip_pool def _choose_mac_ip(vm_properties): """ Chooses mac address and ip address for a vm to be installed. 
It also chooses a random public IP if requested """ if not 'private_ip' in vm_properties: private_ip_info = _get_private_ip_mac(vm_properties['security_domain']) vm_properties['private_ip'] = private_ip_info.private_ip vm_properties['mac_addr'] = private_ip_info.mac_addr vm_properties['vlan_name'] = private_ip_info.vlan.name vm_properties['vlan_tag'] = private_ip_info.vlan.vlan_tag if vm_properties['public_ip_req']: if 'public_ip' not in vm_properties: public_ip_pool = _choose_random_public_ip() if public_ip_pool: vm_properties['public_ip'] = public_ip_pool.public_ip else: raise Exception("Available Public IPs are exhausted.") else: vm_properties['public_ip'] = None def _choose_mac_ip_vncport(vm_properties): """ Chooses mac address, ip address and vncport for a vm to be installed """ _choose_mac_ip(vm_properties) start_range = int(get_constant('vncport_start_range')) end_range = int(get_constant('vncport_end_range')) vnc_ports_taken = current.db().select(current.db.vm_data.vnc_port) while True: random_vnc_port = random.randrange(start_range, end_range, 1) if not random_vnc_port in vnc_ports_taken: break; vm_properties['vnc_port'] = str(random_vnc_port) def find_new_host(RAM, vCPU): """ Select a random host from list of 3 hosts with available RAM and CPU Availability is checked with 200 percent over-commitment. 
""" hosts = current.db(current.db.host.status == 1).select() hosts = hosts.as_list(True,False) count = 3 selected_hosts = [] while count != 0 and hosts: host = random.choice(hosts) logger.debug("Checking host =" + host['host_name']) (used_ram, used_cpu) = host_resources_used(host['id']) logger.debug("used ram: " + str(used_ram) + " used cpu: " + str(used_cpu) + " host ram: " + str(host['RAM']) + " host cpu "+ str(host['CPUs'])) host_ram_after_200_percent_overcommitment = math.floor((host['RAM'] * 1024) * 2) host_cpu_after_200_percent_overcommitment = math.floor(host['CPUs'] * 2) logger.debug("ram available: %s cpu available: %s cpu < max cpu: %s" % ((( host_ram_after_200_percent_overcommitment - used_ram) >= RAM), ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vCPU), (vCPU <= host['CPUs']) )) if((( host_ram_after_200_percent_overcommitment - used_ram) >= RAM) and ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vCPU) and (vCPU <= host['CPUs'])): selected_hosts.append(host) count = count -1 hosts.remove(host) if selected_hosts: #Sort selected host list by Ram first then Cpu selected_host = sorted(selected_hosts,key=lambda k: k['RAM'])[0] return selected_host['id'] #If no suitable host found raise Exception("No active host is available for a new vm.") def allocate_vm_properties(vm_details): """ Allocates vm properties ( datastore, host, ip address, mac address, vnc port, ram, vcpus) """ logger.debug("Inside allocate_vm_properties()...") vm_properties = {} vm_properties['datastore'] = _choose_datastore() logger.debug("Datastore selected is: " + str(vm_properties['datastore'])) vm_properties['host'] = find_new_host(vm_details.RAM, vm_details.vCPU) logger.debug("Host selected is: " + str(vm_properties['host'])) vm_properties['public_ip_req'] = False if (vm_details.public_ip == None) else True vm_properties['security_domain'] = vm_details.security_domain _choose_mac_ip_vncport(vm_properties) logger.debug("MAC is : " + str(vm_properties['mac_addr']) 
+ " IP is : " + str(vm_properties['private_ip']) + " VNCPORT is : " \ + str(vm_properties['vnc_port']) + " Vlan tag is " + str(vm_properties['vlan_tag']) ) vm_properties['ram'] = vm_details.RAM vm_properties['vcpus'] = vm_details.vCPU return vm_properties def create_vm_image(vm_details, datastore): """ Create a VM image - Creates a directory for the new VM using vm_identity - Find the location of template image requested for - Copy the template image from its location to new vm directory """ # Creates a directory for the new vm vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity logger.debug("Creating vm directory...") if not os.path.exists (vm_directory_path): os.makedirs(vm_directory_path) else: raise Exception("Directory with same name as vmname already exists.") # Finds the location of template image that the user has requested for its vm. template = current.db.template[vm_details.template_id] vm_image_name = vm_directory_path + '/' + vm_details.vm_identity + '.qcow2' # Copies the template image from its location to new vm directory storage_type = config.get("GENERAL_CONF","storage_type") copy_command = 'ndmpcopy ' if storage_type == current.STORAGE_NETAPP_NFS else 'cp ' #template_dir = get_constant('vm_templates_datastore') if copy_command == 'cp ': template_location = datastore.system_mount_point + '/' + get_constant('templates_dir') + '/' + template.hdfile logger.debug("cp %s %s" % (template_location, vm_image_name)) rc = os.system("cp %s %s" % (template_location, vm_image_name)) if rc != 0: logger.error("Copy not successful") raise Exception("Copy not successful") else: logger.debug("Copied successfully") elif copy_command == 'ndmpcopy ': template_dir = template.datastore_id.path logger.debug(template_dir) logger.debug("Copy in progress when storage type is " + str(storage_type)) command_to_execute = copy_command + template_dir + '/' + get_constant("templates_dir") + '/' + \ template.hdfile + ' ' + 
def _get_install_command(vm_details, vm_image_location, vm_properties):
    """
    Build the virt-install command line for the new VM.

    Windows templates get the hypervisor's default disk/NIC devices;
    everything else uses virtio for both.  Returns the command string.
    """
    template = vm_properties['template']
    bus = ',bus=virtio'
    optional = ' --import --os-type=' + template.os
    model = ',model=virtio'
    if (template.arch != 'amd64' and template.os == 'Linux'):
        optional = optional + ' --arch=' + template.arch + ' '
    format_command = ''
    if (template.type == 'QCOW2'):
        format_command = ',format=qcow2'
    if (template.os == 'Windows'):
        bus = ''
        model = ''
    # Fix: the original embedded backslash-newline continuations inside one
    # string literal, which silently included long runs of indentation spaces
    # in the command.  Built here as one readable expression; a single space
    # between options is shell-equivalent.
    # NOTE(review): 'password=<PASSWORD>' looks like a redacted placeholder --
    # confirm the real VNC password source before deploying.
    install_command = ('virt-install'
                       ' --name=' + vm_details.vm_identity +
                       ' --ram=' + str(vm_properties['ram']) +
                       ' --vcpus=' + str(vm_properties['vcpus']) + optional +
                       ' --disk path=' + vm_image_location + format_command + bus + ',cache=none' +
                       ' --network network=' + current.LIBVIRT_NETWORK + model + ',mac=' + vm_properties['mac_addr'] +
                       ' --graphics vnc,port=' + vm_properties['vnc_port'] + ',listen=0.0.0.0,password=<PASSWORD>' +
                       ' --noautoconsole'
                       ' --autostart'
                       ' --force')
    return install_command


def _generate_disk_xml(diskpath, target_disk):
    """
    Generate the libvirt <disk> XML for attaching an extra qcow2 disk at the
    given device path and target device name (e.g. 'vdb').
    """
    root_element = etree.Element('disk', attrib={'type': 'block', 'device': 'disk'})
    etree.SubElement(root_element, 'driver', attrib={'name': 'qemu', 'cache': 'none', 'type': 'qcow2'})
    etree.SubElement(root_element, 'source', attrib={'dev': diskpath})
    etree.SubElement(root_element, 'target', attrib={'dev': target_disk})
    return (etree.tostring(root_element))


def create_extra_disk_image(vm_details, disk_name, size, datastore):
    """
    Create an extra qcow2 disk image of the given size (GB) for the VM.
    Returns True on success, False on failure.
    """
    vm_extra_disks_directory_path = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                    datastore.ds_name + '/' + vm_details.vm_identity
    if not os.path.exists(vm_extra_disks_directory_path):
        logger.debug("Making Directory")
        os.makedirs(vm_extra_disks_directory_path)
    diskpath = vm_extra_disks_directory_path + '/' + disk_name
    command = "qemu-img create -f qcow2 " + diskpath + " " + str(size) + "G"
    output = os.system(command)
    # Fix: simplified "False if output != 0 else True".
    return output == 0


def attach_disk(vm_details, disk_name, hostip, already_attached_disks, new_vm):
    """
    Attach the named extra disk to the VM via libvirt.

    For a new VM the domain is stopped, the disk attached to the persistent
    config and the domain restarted; for a shut-down VM the disk is attached
    to the config directly.  Returns the disk size on success, 0 on failure.
    """
    try:
        (connection_object, domain) = getVirshDomainConn(None, hostip, vm_details.vm_identity)
        logger.debug("Value of alreadyattached is : " + str(already_attached_disks))
        (diskpath, device_present, disk_size) = get_extra_disk_location(vm_details.datastore_id, vm_details.vm_identity, disk_name, True)
        if not device_present:
            raise Exception("Device to be attached %s missing" % (diskpath))
        # Target devices count up from the root disk: vdb, vdc, ...
        target_disk = "vd" + chr(97 + already_attached_disks + 1)
        logger.debug(target_disk)
        logger.debug("...................")
        xmlDescription = _generate_disk_xml(diskpath, target_disk)
        logger.debug(xmlDescription)
        logger.debug("new vm is %s " % new_vm)
        if new_vm:
            logger.debug("Starting to attach disk on new vm request.")
            domain.destroy()
            logger.debug("VM destroyed")
            domain.attachDeviceFlags(xmlDescription, VIR_DOMAIN_AFFECT_CONFIG)
            logger.debug("Disk attached")
            logger.debug("Turn on vm")
            domain.create()
            logger.debug("VM started")
            domain.isActive()
        elif vm_details.status == current.VM_STATUS_SHUTDOWN:
            logger.debug("Starting to attach disk while vm is shutdown.")
            domain.attachDeviceFlags(xmlDescription, VIR_DOMAIN_AFFECT_CONFIG)
            logger.debug("Disk attached")
        else:
            raise Exception("VM is not in shutdown state. Check its status on host")
        xmlfile = domain.XMLDesc(0)
        domain = connection_object.defineXML(xmlfile)
        logger.debug("VM XML redefined")
        connection_object.close()
        return disk_size
    # Fix: narrowed the bare "except:" (which also swallowed SystemExit /
    # KeyboardInterrupt); failures still log and report 0.
    except Exception:
        logger.exception('Exception: ')
        return 0


def serve_extra_disk_request(vm_details, disk_size, host_ip, new_vm=False):
    """
    Create and attach an extra disk of disk_size GB to the VM, then record
    the disk and the datastore usage in the database.
    Returns True on success, False otherwise.
    """
    logger.debug("Starting to serve extra disk request...")
    logger.debug("new vm is %s " % new_vm)
    datastore = _choose_datastore()
    already_attached_disks = len(current.db(current.db.attached_disks.vm_id == vm_details.id).select())
    disk_name = vm_details.vm_identity + "_disk" + str(already_attached_disks + 1) + ".qcow2"
    disk_created = create_extra_disk_image(vm_details, disk_name, disk_size, datastore)
    vm_details.datastore_id = datastore.id
    if disk_created:
        if (attach_disk(vm_details, disk_name, host_ip, already_attached_disks, new_vm)):
            current.db.attached_disks.insert(vm_id=vm_details.id, datastore_id=datastore.id,
                                             attached_disk_name=disk_name, capacity=disk_size)
            current.db(current.db.datastore.id == datastore.id).update(used=int(datastore.used) + int(disk_size))
            return True
    return False


def launch_vm_on_host(vm_details, vm_image_location, vm_properties):
    """
    Install the VM image on its chosen host via virt-install, set its VLAN
    portgroup, and serve any extra-disk request.  Returns a status message
    describing the extra-disk attachment outcome (empty when none requested).
    """
    attach_disk_status_message = ''
    install_command = _get_install_command(vm_details, vm_image_location, vm_properties)
    # Starts installing a vm
    host_ip = current.db.host[vm_properties['host']].host_ip.private_ip
    logger.debug("Installation started...")
    logger.debug("Host is " + host_ip)
    logger.debug("Installation command : " + install_command)
    command_output = execute_remote_cmd(host_ip, 'root', install_command)
    logger.debug(command_output)
    logger.debug("Starting to set portgroup in vm...")
    _set_portgroup_in_vm(vm_details['vm_identity'], vm_properties['vlan_name'], host_ip, vm_properties['vlan_tag'])
    logger.debug("Portgroup set in vm")
    # Serving HDD request
    if (int(vm_details.extra_HDD) != 0):
        if (serve_extra_disk_request(vm_details, vm_details.extra_HDD, host_ip, new_vm=True)):
            message = "Attached extra disk successfully."
            attach_disk_status_message += message
            logger.debug(message)
        else:
            attach_disk_status_message += "Attached extra disk failed."
    return attach_disk_status_message
def check_if_vm_defined(hostip, vmname):
    """
    Check whether a newly created VM is defined and active on the host.

    Returns True when the named domain's ID appears in the host's list of
    running domain IDs; False on any failure (unreachable host, missing
    domain, ...).
    """
    connection_object = None
    try:
        connection_object = libvirt.openReadOnly('qemu+ssh://root@' + hostip + '/system')
        domain = connection_object.lookupByName(vmname)
        return domain.ID() in connection_object.listDomainsID()
    except Exception:
        return False
    finally:
        # Fix: the original leaked the connection whenever lookupByName (or
        # the ID check) raised; always close what was opened.
        if connection_object is not None:
            try:
                connection_object.close()
            except Exception:
                pass


def _free_vm_properties(vm_details, vm_properties):
    """
    Roll back a failed installation: destroy/undefine the half-created
    domain on its host and remove the VM's image and extra-disk directories
    and attached-disk rows.
    """
    logger.debug("VM installation fails..Starting to free vm properties")
    if vm_properties:
        host_ip_of_vm = current.db.host[vm_properties['host']].host_ip.private_ip
        logger.debug("Host IP of vm is " + str(host_ip_of_vm))
        if check_if_vm_defined(host_ip_of_vm, vm_details.vm_identity):
            connection_object = libvirt.open('qemu+ssh://root@' + host_ip_of_vm + '/system')
            domain = connection_object.lookupByName(vm_details.vm_identity)
            logger.debug("Starting to delete vm from host..")
            domain.destroy()
            domain.undefine()
            connection_object.close()
            logger.debug("VM deleted.")
        current.db(current.db.attached_disks.vm_id == vm_details.id).delete()
        if 'datastore' in vm_properties:
            vm_directory_path = vm_properties['datastore'].system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity
            vm_extra_disk_dir_path = vm_properties['datastore'].system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \
                                     vm_properties['datastore'].ds_name + '/' + vm_details.vm_identity
            if os.path.exists(vm_directory_path):
                logger.debug("Starting to delete vm directory.")
                shutil.rmtree(vm_directory_path)
            if os.path.exists(vm_extra_disk_dir_path):
                logger.debug("Starting to delete vm extra disk directory.")
                shutil.rmtree(vm_extra_disk_dir_path)
    return


def update_db_after_vm_installation(vm_details, vm_properties, parent_id=None):
    """
    Record a successfully installed VM in the database: datastore usage,
    host, VNC port, IP references, start time and status (SHUTDOWN when the
    VM was created as a clone with a parent, RUNNING otherwise).
    """
    logger.debug("Starting to update db after vm installation..")
    hostid = vm_properties['host']
    datastore = vm_properties['datastore']
    template_hdd = vm_properties['template'].hdd
    logger.debug("Inside update db after installation")
    logger.debug(vm_properties)
    # Updating the used entry of datastore
    current.db(current.db.datastore.id == datastore.id).update(
        used=int(datastore.used) + int(vm_details.extra_HDD) + int(template_hdd))
    private_ip_id = current.db.private_ip_pool(private_ip=vm_properties['private_ip']).id
    public_ip_id = None
    if vm_properties['public_ip'] != None:
        public_ip_id = current.db.public_ip_pool(public_ip=vm_properties['public_ip']).id
    if parent_id:
        vm_status = current.VM_STATUS_SHUTDOWN
    else:
        vm_status = current.VM_STATUS_RUNNING
    # Update vm_data table
    current.db(current.db.vm_data.id == vm_details.id).update(
        host_id=hostid,
        extra_HDD=vm_details.extra_HDD,
        datastore_id=datastore.id,
        vnc_port=vm_properties['vnc_port'],
        private_ip=private_ip_id,
        public_ip=public_ip_id,
        start_time=get_datetime(),
        parent_id=parent_id,
        status=vm_status)
    logger.debug("Updated db")
    return


def create_object_store(parameters, object_data):
    """
    Create an object store and record its access keys.

    Reads the generated keys from /home/key.txt, copies them into a
    per-store key file under private/Object_keys/, and stores the parsed
    keys on the object_data record (status=3).
    Returns a (task status, message) tuple.
    """
    try:
        logger.debug("In create_object_store() function...")
        object_name = object_data['object_store_name']
        size_limit = object_data['object_store_size']
        sh_path = os.path.join(get_context_path(), 'private/object_storage.sh')
        command = 'sh %s %s %s' % (sh_path, object_name, str(size_limit))
        # NOTE(review): 'command' is only logged, never executed here --
        # presumably the script runs elsewhere; confirm before relying on it.
        logger.debug("command :%s" % command)
        file_name = object_data['object_store_name'] + "_key.txt"
        file_path = os.path.join(get_context_path(), 'private/Object_keys/' + file_name)
        # Fix: the original used os.fdopen(fd, "rw+") -- not a valid mode --
        # and leaked both file objects when anything in between raised.  Open
        # the copy for writing and the source for reading, closed via 'with'.
        # (A missing /home/key.txt now fails loudly instead of silently
        # recording empty keys via O_CREAT.)
        with open(file_path, 'w') as co:
            with open('/home/key.txt', 'r') as fo:
                key_s3_secret = fo.readline()
                co.write(key_s3_secret)
                key_s3_access = fo.readline()
                co.write(key_s3_access)
                key_swift_secret = fo.readline()
                co.write(key_swift_secret)
                swift_user = 'Swift_user: ' + object_name + ':swift'
                co.write(swift_user)
        # Each line has the form "<label> <value>"; keep only the value.
        a, b, key_swift_secret = key_swift_secret.partition(' ')  # @UnusedVariable
        a, b, key_s3_secret = key_s3_secret.partition(' ')  # @UnusedVariable
        a, b, key_s3_access = key_s3_access.partition(' ')  # @UnusedVariable
        object_data.update_record(swift_access_key=key_swift_secret.strip(),
                                  s3_secret_key=key_s3_secret.strip(),
                                  s3_access_key=key_s3_access.strip(),
                                  status=3)
        message = "Object Store is created successfully."
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
# Installs a vm
def install(parameters):
    """
    Install a VM end-to-end: allocate properties, create the image, launch
    it on the chosen host, optionally map a public IP, and record the
    result.  On any failure all allocated resources are rolled back.
    Returns a (task status, message) tuple.
    """
    vmid = parameters['vm_id']
    logger.debug("In install() function...")
    vm_details = current.db.vm_data[vmid]
    vm_properties = None
    try:
        # Fetches vm details from vm_data table
        logger.debug("VM details are: " + str(vm_details))
        # Calling allocate_vm_properties function
        vm_properties = allocate_vm_properties(vm_details)
        # Calling create_vm_image function
        (vm_properties['template'], vm_image_location) = create_vm_image(vm_details, vm_properties['datastore'])
        # Calling launch_vm_on_host
        attach_disk_status_message = launch_vm_on_host(vm_details, vm_image_location, vm_properties)
        # Checking if vm has been installed successfully
        assert(check_if_vm_defined(current.db.host[vm_properties['host']].host_ip.private_ip, vm_details.vm_identity)), "VM is not installed. Check logs."
        if vm_properties['public_ip_req']:
            # NAT mapping so the public IP forwards to the VM's private IP.
            create_mapping(vm_properties['public_ip'], vm_properties['private_ip'])
        # Update database after vm installation
        update_db_after_vm_installation(vm_details, vm_properties)
        message = "VM is installed successfully." + attach_disk_status_message
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        # Roll back whatever was allocated before the failure.
        if vm_properties != None:
            _free_vm_properties(vm_details, vm_properties)
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def start(parameters):
    """
    Start a VM.  Fails (via the generic except path) when the domain is
    already running.  Returns a (task status, message) tuple.
    """
    logger.debug("Inside start() function")
    vm_id = parameters['vm_id']
    vm_details = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm_details)
        if domain.info()[0] == VIR_DOMAIN_RUNNING:
            raise Exception("VM is already running. Check vm status on host.")
        domain.create()
        current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_RUNNING)
        message = vm_details.vm_identity + " is started successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def suspend(parameters):
    """
    Suspend (pause) a VM.  Fails when the domain is already paused.
    Returns a (task status, message) tuple.
    """
    logger.debug("Inside suspend() function")
    vm_id = parameters['vm_id']
    vm_details = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm_details)
        if domain.info()[0] == VIR_DOMAIN_PAUSED:
            raise Exception("VM is already paused. Check vm status on host.")
        domain.suspend()
        current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_SUSPENDED)
        message = vm_details.vm_identity + " is suspended successfully."
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())
logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def resume(parameters): """ Resumes a vm """ logger.debug("Inside resume() function") vm_id = parameters['vm_id'] vm_details = current.db.vm_data[vm_id] try: domain = getVirshDomain(vm_details) if domain.info()[0] == VIR_DOMAIN_RUNNING: raise Exception("VM is already running. Check vm status on host.") domain.resume() current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_RUNNING) message = vm_details.vm_identity + " is resumed successfully." logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def destroy(parameters): """ Destroys a vm forcefully """ logger.debug("Inside destroy() function") vm_id = parameters['vm_id'] vm_details = current.db.vm_data[vm_id] logger.debug(str(vm_details)) try: domain = getVirshDomain(vm_details) if domain.info()[0] == VIR_DOMAIN_SHUTOFF: raise Exception("VM is already shutoff. Check vm status on host.") domain.destroy() current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_SHUTDOWN) message = vm_details.vm_identity + " is destroyed successfully." 
logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def shutdown(parameters): """ Destroys a vm gracefully """ logger.debug("Inside shutdown() function") vm_id = parameters['vm_id'] vm_details = current.db.vm_data[vm_id] logger.debug(str(vm_details)) try: domain = getVirshDomain(vm_details) if domain.info()[0] == VIR_DOMAIN_SHUTOFF: raise Exception("VM is already shutoff. Check vm status on host.") domain.managedSave() current.db(current.db.vm_data.id == vm_id).update(status = current.VM_STATUS_SHUTDOWN) message = vm_details.vm_identity + " is shutdown successfully." logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def _clean_up_database_after_vm_deletion(vm_details): """ Cleans up database after vm deletion """ logger.debug("Inside clean up database after vm deletion () function...") # moving vm image folder to archives folder archive_directory_path = vm_details.datastore_id.system_mount_point + '/' + get_constant('archives_dir') if not os.path.exists(archive_directory_path): os.makedirs(archive_directory_path) source_file = vm_details.datastore_id.system_mount_point + '/' + get_constant('vms') + '/' + vm_details.vm_identity archive_filename = vm_details.vm_identity + str(get_datetime()) logger.debug(archive_filename) destination_file = archive_directory_path + '/' + archive_filename shutil.move(source_file, destination_file) # removing hdd vm_extra_disks_directory_path = vm_details.datastore_id.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \ vm_details.datastore_id.ds_name + "/" + vm_details.vm_identity if os.path.exists(vm_extra_disks_directory_path): 
def vm_has_snapshots(vm_id):
    """
    Return True when the VM has at least one snapshot row, else False.
    """
    # Truthiness of the Rows result decides; an empty result is falsy.
    if (current.db(current.db.snapshot.vm_id == vm_id).select()):
        return True
    else:
        return False


def delete(parameters):
    """
    Delete a VM: power it off if needed, undefine it (including snapshot
    metadata), remove any public-IP NAT mapping, then clean up disk and
    database state.  Returns a (task status, message) tuple.
    """
    logger.debug("Inside delete() function")
    vm_id = parameters['vm_id']
    vm_details = current.db.vm_data[vm_id]
    try:
        domain = getVirshDomain(vm_details)
        logger.debug(str(vm_details.status))
        if (vm_details.status == current.VM_STATUS_RUNNING or vm_details.status == current.VM_STATUS_SUSPENDED):
            logger.debug("Vm is not shutoff. Shutting it off first.")
            domain.destroy()
        logger.debug("Starting to delete it...")
        domain.undefineFlags(VIR_DOMAIN_UNDEFINE_SNAPSHOTS_METADATA )
        if vm_details.public_ip:
            remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip)
        message = vm_details.vm_identity + " is deleted successfully."
        logger.debug(message)
        _clean_up_database_after_vm_deletion(vm_details)
        current.db(current.db.vm_data.id == vm_id).delete()
        current.db.commit()
        logger.debug("Task Status: SUCCESS Message: %s " % message)
        return (current.TASK_QUEUE_STATUS_SUCCESS, message)
    except:
        logger.debug("Task Status: FAILED Error: %s " % log_exception())
        return (current.TASK_QUEUE_STATUS_FAILED, log_exception())


def migrate_domain_with_snapshots(vm_details, destination_host_ip, domain, domain_snapshots_list, current_snapshot_name, flags, vm_backup_during_migration):
    """
    Migrate a domain that has snapshots.

    libvirt cannot migrate a domain with snapshots, so each snapshot's XML
    is dumped to vm_backup_during_migration, the snapshots are deleted, the
    domain is migrated, and the snapshots are redefined on the destination
    host with the original current snapshot restored.
    """
    # XML dump of snapshot(s) of the vm
    logger.debug("Starting to take xml dump of the snapshot(s) of the vm... ")
    if not os.path.exists(vm_backup_during_migration):
        os.makedirs(vm_backup_during_migration)
    for domain_snapshot in domain_snapshots_list:
        logger.debug("snapshot name is " + str(domain_snapshot))
        dump_xml_path = vm_backup_during_migration + '/' + 'dump_' + domain_snapshot
        snapshot_dumpxml_command = 'virsh snapshot-dumpxml %s %s > %s' % (
            vm_details.vm_identity, domain_snapshot, dump_xml_path)
        logger.debug("Taking xml dump of" + str(domain_snapshot))
        command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', snapshot_dumpxml_command)
        logger.debug(command_output)
        logger.debug("XML dump of " + str(domain_snapshot) + "succeeded.")
    # Delete snapshot(s) of the vm and migrate it to destination host
    logger.debug("Starting to delete snapshots of the vm....")
    for domain_snapshot in domain_snapshots_list:
        snapshot = domain.snapshotLookupByName(domain_snapshot, 0)
        snapshot.delete(0)
    logger.debug("Migrating the vm to destination host...")
    domain.migrateToURI("qemu+ssh://root@" + destination_host_ip + "/system", flags , None, 0)
    # Redefine all the snapshot(s) of the vm on the destination host and set current snapshot
    logger.debug("Starting to redefine all the snapshot(s) of the domain...")
    for domain_snapshot in domain_snapshots_list:
        redefine_xml_path = vm_backup_during_migration + '/' + 'dump_' + domain_snapshot
        snapshot_redefine_command = 'virsh snapshot-create --redefine %s %s ' % (vm_details.vm_identity, redefine_xml_path)
        command_output = execute_remote_cmd(destination_host_ip, 'root', snapshot_redefine_command)
        logger.debug(command_output)
    snapshot_current_command = 'virsh snapshot-current %s %s' % (vm_details.vm_identity, current_snapshot_name)
    command_output = execute_remote_cmd(destination_host_ip, 'root', snapshot_current_command)
    logger.debug(command_output)
    return
def _clean_migration_directory(vm_backup_during_migration):
    """ Delete directory created for storing dumpxml of vm snapshots """
    # A missing directory is simply a no-op.
    if not os.path.exists(vm_backup_during_migration):
        return
    shutil.rmtree(vm_backup_during_migration)
    return


def undo_migration(vm_details, domain_snapshots_list, current_snapshot_name, vm_backup_during_migration):
    """ Undo the migration """
    if domain_snapshots_list:
        # Redefine the snapshots of the vm on the source host
        logger.debug("Starting to redefine all the snapshot(s) of the vm on the source host...")
        source_host_ip = vm_details.host_id.host_ip.private_ip
        for snapshot_name in domain_snapshots_list:
            dump_path = vm_backup_during_migration + '/' + 'dump_' + snapshot_name
            redefine_cmd = 'virsh snapshot-create --redefine %s %s ' % (vm_details.vm_identity, dump_path)
            logger.debug(execute_remote_cmd(source_host_ip, 'root', redefine_cmd, None, True))
        current_cmd = 'virsh snapshot-current %s %s' % (vm_details.vm_identity, current_snapshot_name)
        logger.debug(execute_remote_cmd(source_host_ip, 'root', current_cmd, None, True))
    # Delete directory created for storing dumpxml of vm snapshots
    _clean_migration_directory(vm_backup_during_migration)
    return
[] current_snapshot_name = '' vm_migration_directory = get_constant('vm_migration_data') vm_backup_during_migration = vm_details.datastore_id.system_mount_point + '/' + vm_migration_directory + '/' + \ vm_details.vm_identity if destination_host_id == None: destination_host_id = find_new_host(vm_details.RAM, vm_details.vCPU) destination_host_ip = current.db.host[destination_host_id].host_ip.private_ip flags = VIR_MIGRATE_PEER2PEER|VIR_MIGRATE_PERSIST_DEST|VIR_MIGRATE_UNDEFINE_SOURCE|VIR_MIGRATE_UNSAFE if live_migration: flags |= VIR_MIGRATE_TUNNELLED|VIR_MIGRATE_LIVE if vm_details.status == current.VM_STATUS_SUSPENDED: logger.debug("Vm is suspended") flags |= VIR_MIGRATE_TUNNELLED|VIR_MIGRATE_PAUSED elif vm_details.status == current.VM_STATUS_SHUTDOWN: logger.debug("Vm is shut off") flags |= VIR_MIGRATE_OFFLINE logger.debug("Flags: " + str(flags)) try: domain = getVirshDomain(vm_details) dom_snapshot_names = domain.snapshotListNames(0) for snapshot in current.db(current.db.snapshot.vm_id == vm_id).select(): logger.debug("snapshot:" + str(snapshot.snapshot_name)) domain_snapshots_list.append(snapshot.snapshot_name) dom_snapshot_names.remove(snapshot.snapshot_name) logger.debug("domain snapshot list is " + str(domain_snapshots_list)) for dom_snapshot in dom_snapshot_names: logger.debug("Deleting orphan snapshot %s" %(dom_snapshot)) snapshot = domain.snapshotLookupByName(dom_snapshot, 0) snapshot.delete(0) if domain_snapshots_list: current_snapshot = domain.snapshotCurrent(0) current_snapshot_name = current_snapshot.getName() migrate_domain_with_snapshots(vm_details, destination_host_ip, domain, domain_snapshots_list, current_snapshot_name, flags, vm_backup_during_migration) else: domain.migrateToURI("qemu+ssh://root@" + destination_host_ip + "/system", flags , None, 0) vm_details.update_record(host_id = destination_host_id) current.db.commit() # Delete directory created for storing dumpxml of vm snapshot _clean_migration_directory(vm_backup_during_migration) message = 
vm_details.vm_identity + " is migrated successfully." logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: undo_migration(vm_details, domain_snapshots_list, current_snapshot_name, vm_backup_during_migration) logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def migrate_domain_datastore(vmid, destination_datastore_id, live_migration=False): """ Migrate VM domain from one datastore to another. - Copy VM Image to new datastore - Update VM XML definition - Update database """ logger.debug(sys.path) vm_details = current.db.vm_data[vmid] # datastore_id = vm_details["datastore_id"] logger.debug("Inside live disk migration block") try: (connection_object, domain) = getVirshDomainConn(vm_details) datastore = current.db.datastore[destination_datastore_id] vm_directory_path = datastore.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity logger.debug("Creating vm directory on other datastore...") if not os.path.exists (vm_directory_path): os.makedirs(vm_directory_path) diskpath = vm_directory_path + '/' + vm_details.vm_identity + '.qcow2' current_disk_path = vm_details.datastore_id.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity current_disk_file = current_disk_path + '/' + vm_details.vm_identity + '.qcow2' logger.debug(current_disk_file) xmlfile = domain.XMLDesc(0) if(live_migration==False): rc = os.system("cp %s %s" % (current_disk_file, diskpath)) if rc != 0: logger.error("Copy not successful") raise Exception("Copy not successful") else: logger.debug("Copied successfully") else: if domain.isActive: domain.undefine() root = etree.fromstring(xmlfile) target_elem = root.find("devices/disk/target") target_disk = target_elem.get('dev') # # destxml = generate_blockcopy_xml(diskpath,target_disk) flag = VIR_DOMAIN_BLOCK_REBASE_SHALLOW | VIR_DOMAIN_BLOCK_REBASE_COPY domain.blockRebase(target_disk, 
diskpath, 0, flag) block_info_list = domain.blockJobInfo(current_disk_file,0) while(block_info_list['end'] != block_info_list['cur']): logger.debug("time to sleep") time.sleep(60) block_info_list = domain.blockJobInfo(current_disk_file,0) domain.blockJobAbort(current_disk_file, VIR_DOMAIN_BLOCK_JOB_ABORT_PIVOT) source_elem = root.find("devices/disk/source") source_elem.set('file',diskpath) newxml_file = etree.tostring(root) domain = connection_object.defineXML(newxml_file) vm_details.update_record(datastore_id=destination_datastore_id) if os.path.exists (diskpath): os.remove(current_disk_file) restore_symboltable_path = current_disk_path+"/restore_symboltable" if os.path.exists (restore_symboltable_path): logger.debug(restore_symboltable_path) os.remove(restore_symboltable_path) os.rmdir(current_disk_path) connection_object.close() message = vm_details.vm_identity + " is migrated successfully to new datastore." logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: #undo_datastore_migration(vm_details, domain, diskpath, current_disk_file, vm_directory_path, datastore_id) connection_object.close() logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def undo_datastore_migration(vm_details, domain, diskpath, current_disk_file, vm_directory_path, datastore_id): """ Undo migration in case of any issue """ # undo databse changes vm_details.update_record(datastore_id=datastore_id) if domain.isActive: logger.debug("domain is active") block_info_list = domain.blockJobInfo(current_disk_file,0) if(bool(block_info_list) == True): while(block_info_list['end'] != block_info_list['cur']): logger.debug("time to sleep") time.sleep(60) block_info_list = domain.blockJobInfo(current_disk_file,0) if(block_info_list['end'] == block_info_list['cur']): domain.blockJobAbort(current_disk_file) block_info_list = domain.blockJobInfo(current_disk_file,0) if 
os.path.exists (diskpath): os.remove(diskpath) os.rmdir(vm_directory_path) def migrate(parameters): """ Migrates VM to new host """ vmid = parameters['vm_id'] logger.debug("Inside migrate() function for vm_id: "+str(vmid)) destination_host_id = parameters['destination_host'] if parameters['live_migration'] == 'on': live_migration = True else: live_migration = False return migrate_domain(vmid, destination_host_id, live_migration) def migrate_datastore(parameters): """ Migrates VM to new datastore """ logger.debug("Inside migrate_datastore() function") vmid = parameters['vm_id'] destination_ds_id = parameters['destination_ds'] if parameters['live_migration'] == 'on': live_migration = True else: live_migration = False return migrate_domain_datastore(vmid, destination_ds_id, live_migration) def snapshot(parameters): """ Snapshots a vm """ logger.debug("Inside snapshot() function") vm_id = parameters['vm_id'] snapshot_type = parameters['snapshot_type'] try: vm_details = current.db.vm_data[vm_id] if is_pingable(str(vm_details.private_ip.private_ip)): logger.debug("VM is pingable. Starting to start with snapshotting...") if snapshot_type != current.SNAPSHOT_USER: snapshots = current.db((current.db.snapshot.vm_id == vm_id) & (current.db.snapshot.type == snapshot_type)).select() #Delete the existing Daily/Monthly/Yearly snapshot for snapshot_cron in snapshots: logger.debug(snapshot_cron) delete_snapshot({'vm_id':vm_id, 'snapshot_id':snapshot_cron.id}) snapshot_name = get_datetime().strftime("%I:%M%p_%B%d,%Y") domain = getVirshDomain(vm_details) xmlDesc = "<domainsnapshot><name>%s</name></domainsnapshot>" % (snapshot_name) domain.snapshotCreateXML(xmlDesc, 0) message = "Snapshotted successfully." 
current.db.snapshot.insert(vm_id = vm_id, datastore_id = vm_details.datastore_id, snapshot_name = snapshot_name, type = snapshot_type) logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) else: message = "Unable to ping VM before snapshoting: %s" % (vm_details.private_ip.private_ip) raise Exception("Unable to ping VM before snapshoting: %s" % (vm_details.private_ip.private_ip)) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def revert(parameters): """ Reverts to snapshot """ logger.debug("Inside revert snapshot() function") vm_id = parameters['vm_id'] snapshotid = parameters['snapshot_id'] vm_details = current.db.vm_data[vm_id] try: domain = getVirshDomain(vm_details) snapshot_name = current.db(current.db.snapshot.id == snapshotid).select().first()['snapshot_name'] snapshot = domain.snapshotLookupByName(snapshot_name, 0) domain.revertToSnapshot(snapshot, 0) message = "Reverted to snapshot successfully." logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def delete_snapshot(parameters): """ Deletes a snapshot """ logger.debug("Inside delete snapshot() function") vm_id = parameters['vm_id'] snapshotid = parameters['snapshot_id'] vm_details = current.db.vm_data[vm_id] logger.debug(str(vm_details)) try: domain = getVirshDomain(vm_details) snapshot_name = current.db(current.db.snapshot.id == snapshotid).select().first()['snapshot_name'] snapshot = None try: snapshot = domain.snapshotLookupByName(snapshot_name, 0) except libvirtError: logger.debug("Snapshot %s not found" %(snapshot_name)) if snapshot != None: snapshot.delete(0) message = "Deleted snapshot successfully." 
logger.debug(message) current.db(current.db.snapshot.id == snapshotid).delete() logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def update_security_domain(vm_details, security_domain_id, xmlDesc=None): """ Get new IP for given security domain. Update the VM XML with new mac_address and update the information in DB """ # fetch new private IP from db from given security domain private_ip_info = _get_private_ip_mac(security_domain_id) # update vm config to add new mac address. root = etree.fromstring(xmlDesc) mac_elem = root.find("devices/interface[@type='bridge']/mac") mac_elem.set('address', private_ip_info.mac_addr) vlan_tag_elem = root.find("devices/interface[@type='bridge']/vlan/tag") vlan_tag_elem.set('id', private_ip_info.vlan.vlan_tag) # update NAT IP mapping, if public IP present if vm_details.public_ip: remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip) create_mapping(vm_details.public_ip.public_ip, private_ip_info.private_ip) # update vm_data current.db(current.db.vm_data.id == vm_details.id).update(security_domain = security_domain_id, private_ip = private_ip_info.id) return etree.tostring(root) def edit_vm_config(parameters): """ Edits vm configuration """ logger.debug("Inside edit vm config() function") vm_id = parameters['vm_id'] vm_details = current.db.vm_data[vm_id] message = "" try: connection_object, domain = getVirshDomainConn(vm_details) if 'vcpus' in parameters: new_vcpus = int(parameters['vcpus']) domain.setVcpusFlags(new_vcpus, VIR_DOMAIN_VCPU_MAXIMUM) domain.setVcpusFlags(new_vcpus, VIR_DOMAIN_AFFECT_CONFIG) message += "Edited vCPU successfully." 
current.db(current.db.vm_data.id == vm_id).update(vCPU = new_vcpus) if 'ram' in parameters: new_ram = int(parameters['ram']) * 1024 logger.debug(str(new_ram)) domain.setMemoryFlags(new_ram, VIR_DOMAIN_MEM_MAXIMUM) domain.setMemoryFlags(new_ram, VIR_DOMAIN_AFFECT_CONFIG) message += " And edited RAM successfully." current.db(current.db.vm_data.id == vm_id).update(RAM = int(parameters['ram'])) if 'public_ip' in parameters: enable_public_ip = parameters['public_ip'] if enable_public_ip: public_ip_pool = _choose_random_public_ip() if public_ip_pool: create_mapping(public_ip_pool.public_ip, vm_details.private_ip.private_ip) current.db.vm_data[vm_id] = dict(public_ip=public_ip_pool.id) message += "Edited Public IP successfully." else: raise Exception("Available Public IPs are exhausted.") else: remove_mapping(vm_details.public_ip.public_ip, vm_details.private_ip.private_ip) current.db.vm_data[vm_id] = dict(public_ip = None) if 'security_domain' in parameters: logger.debug('Updating security domain') xmlfile = update_security_domain(vm_details, parameters['security_domain'], domain.XMLDesc(0)) domain = connection_object.defineXML(xmlfile) if domain.isActive(): domain.reboot(0) message += "Edited security domain successfully" connection_object.close() logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def _get_clone_properties(vm_details, cloned_vm_details, vm_properties): """ Get properties for Cloned VM. 
""" datastore = _choose_datastore() vm_properties['datastore'] = datastore logger.debug("Datastore selected is: " + str(datastore)) vm_properties['security_domain'] = vm_details.security_domain vm_properties['public_ip_req'] = False # Finds mac address, ip address and vnc port for the cloned vm _choose_mac_ip_vncport(vm_properties) logger.debug("MAC is : " + str(vm_properties['mac_addr']) + " IP is : " + str(vm_properties['private_ip']) + \ " VNCPORT is : " + str(vm_properties['vnc_port'])) # Template and host of parent vm vm_properties['template'] = current.db(current.db.template.id == vm_details.template_id).select()[0] vm_properties['vm_host_details'] = current.db.host[vm_details.host_id] vm_properties['host'] = vm_properties['vm_host_details'].id # Creates a directory for the cloned vm logger.debug("Creating directory for cloned vm...") cloned_vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + cloned_vm_details.vm_identity if not os.path.exists (cloned_vm_directory_path): os.makedirs(cloned_vm_directory_path) clone_file_parameters = ' --file ' + cloned_vm_directory_path + '/' + cloned_vm_details.vm_identity + '.qcow2' else: raise Exception("Directory with same name as vmname already exists.") # Creates a folder for additional disks of the cloned vm vm = current.db(current.db.vm_data.vm_identity == vm_details.vm_identity).select().first() disk_details_of_cloning_vm = current.db(current.db.attached_disks.vm_id == vm.id).select(orderby=current.db.attached_disks.attached_disk_name) logger.debug(disk_details_of_cloning_vm) already_attached_disks = len(disk_details_of_cloning_vm) cloned_vm_extra_disks_directory = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \ datastore.ds_name + '/' + cloned_vm_details.vm_identity if already_attached_disks > 0: if not os.path.exists (cloned_vm_extra_disks_directory): logger.debug("Making Directory") os.makedirs(cloned_vm_extra_disks_directory) count = 
already_attached_disks while already_attached_disks > 0: disk_name = cloned_vm_details.vm_identity + '_disk' + str(count - already_attached_disks + 1) + '.qcow2' clone_file_parameters += ' --file ' + cloned_vm_extra_disks_directory + '/' + disk_name current.db.attached_disks.insert(vm_id = cloned_vm_details.id, datastore_id = datastore.id , attached_disk_name = disk_name, capacity = disk_details_of_cloning_vm[count - already_attached_disks].capacity) already_attached_disks -= 1 return (clone_file_parameters) def migrate_clone_to_new_host(vm_details, cloned_vm_details, new_host_id_for_cloned_vm,vm_properties): """ Migrates cloned vm to new host """ try: new_host_ip_for_cloned_vm = current.db.host[new_host_id_for_cloned_vm].host_ip.private_ip logger.debug("New host ip for cloned vm is: " + str(new_host_ip_for_cloned_vm)) flags = VIR_MIGRATE_PEER2PEER|VIR_MIGRATE_PERSIST_DEST|VIR_MIGRATE_UNDEFINE_SOURCE|VIR_MIGRATE_OFFLINE|VIR_MIGRATE_UNSAFE logger.debug("Clone currently on: " + str(vm_details.host_id.host_ip)) (current_host_connection_object, domain) = getVirshDomainConn(None, vm_details.host_id.host_ip, cloned_vm_details.vm_identity) logger.debug("Starting to migrate cloned vm to host " + str(new_host_ip_for_cloned_vm)) domain.migrateToURI("qemu+ssh://root@" + new_host_ip_for_cloned_vm + "/system", flags , None, 0) current_host_connection_object.close() logger.debug("Successfully migrated cloned vm to host " + str(new_host_ip_for_cloned_vm)) cloned_vm_details.update_record(host_id = new_host_id_for_cloned_vm) vm_properties['host'] = new_host_id_for_cloned_vm return True except libvirt.libvirtError,e: message = e.get_error_message() logger.debug("Error: " + message) return False def clone(vmid): """ Clones vm """ vm_properties = {} logger.debug("Inside clone() function") cloned_vm_details = current.db.vm_data[vmid] vm_details = current.db(current.db.vm_data.id == cloned_vm_details.parent_id).select().first() try: domain = getVirshDomain(vm_details) if 
domain.info()[0] != VIR_DOMAIN_SHUTOFF: raise Exception("VM is not shutoff. Check vm status.") clone_file_parameters = _get_clone_properties(vm_details, cloned_vm_details, vm_properties) logger.debug("cloned vm properties after clone_file_parameters" + str(vm_properties)) host = vm_properties['vm_host_details'] logger.debug("host is: " + str(host)) logger.debug("host details are: " + str(host)) (used_ram, used_cpu) = host_resources_used(host.id) logger.debug("uram: " + str(used_ram) + " used_cpu: " + str(used_cpu) + " host ram: " + str(host.RAM) +" host cpu: " + str(host.CPUs)) host_ram_after_200_percent_overcommitment = math.floor((host.RAM * 1024) * 2) host_cpu_after_200_percent_overcommitment = math.floor(host.CPUs * 2) logger.debug("host_ram_after_200_percent_overcommitment in MB " + str(host_ram_after_200_percent_overcommitment)) logger.debug("host_cpu_after_200_percent_overcommitment " + str(host_cpu_after_200_percent_overcommitment)) logger.debug("Available RAM on host: %s, Requested RAM: %s" % ((host_ram_after_200_percent_overcommitment - used_ram), vm_details.RAM)) logger.debug("Available CPUs on host: %s, Requested CPU: %s " % ((host_cpu_after_200_percent_overcommitment - used_cpu), vm_details.vCPU)) if((( host_ram_after_200_percent_overcommitment - used_ram) >= vm_details.RAM) and ((host_cpu_after_200_percent_overcommitment - used_cpu) >= vm_details.vCPU) and (vm_details.vCPU <= host.CPUs)): clone_command = "virt-clone --original " + vm_details.vm_identity + " --name " + cloned_vm_details.vm_identity + \ clone_file_parameters + " --mac " + vm_properties['mac_addr'] command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', clone_command, None, True) logger.debug(command_output) logger.debug("Updating db after cloning") update_db_after_vm_installation(cloned_vm_details, vm_properties, parent_id = vm_details.id) message = "Cloned successfully. 
" try: new_host_id_for_cloned_vm = find_new_host(cloned_vm_details.RAM, cloned_vm_details.vCPU) if new_host_id_for_cloned_vm != host.id: if migrate_clone_to_new_host(vm_details, cloned_vm_details, new_host_id_for_cloned_vm,vm_properties): message += "Found new host and migrated successfully." else: message += "Found new host but not migrated successfully." else: message += "New host selected to migrate cloned vm is same as the host on which it currently resides." except: message += "Could not find host to migrate cloned vm." logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) else: raise Exception("Host resources exhausted. Migrate the host vms and then try.") except: _free_vm_properties(cloned_vm_details, vm_properties) logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def attach_extra_disk(parameters): """ Attaches extra disk to VM """ logger.debug("Inside attach extra disk() function") vmid = parameters['vm_id'] disk_size = parameters['disk_size'] vm_details = current.db.vm_data[vmid] logger.debug(str(vm_details)) try: if (serve_extra_disk_request(vm_details, disk_size, vm_details.host_id.host_ip.private_ip)): current.db(current.db.vm_data.id == vmid).update(extra_HDD = vm_details.extra_HDD + disk_size) message = "Attached extra disk successfully" logger.debug(message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) else: message = " Your request for additional HDD could not be completed at this moment. Check logs." 
logger.debug("Task Status: SUCCESS Message: %s " % message) return (current.TASK_QUEUE_STATUS_FAILED, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def get_vm_image_location(datastore_id, vm_identity): """ Get the file path for qcow2 image of a VM """ datastore = current.db.datastore[datastore_id] vm_directory_path = datastore.system_mount_point + '/' + get_constant('vms') + '/' + vm_identity vm_image_name = vm_directory_path + '/' + vm_identity + '.qcow2' image_present = True if os.path.exists(vm_image_name) else False return (vm_image_name, image_present) def get_extra_disk_location(datastore_id, vm_identity, disk_name, get_disk_size=False): """ Get the file path for qcow2 image of teh extra disk """ datastore = current.db.datastore[datastore_id] if datastore: vm_extra_disks_directory_path = datastore.system_mount_point + '/' + get_constant('extra_disks_dir') + '/' + \ datastore.ds_name + '/' + vm_identity ext = '' if disk_name.endswith('.qcow2') else '.qcow2' disk_image_path = vm_extra_disks_directory_path + '/' + disk_name + ext image_present = True if os.path.exists(disk_image_path) else False disk_size = 0 if image_present & get_disk_size: command = "qemu-img info " + disk_image_path + " | grep 'virtual size'" ret = os.popen(command).read() # Returns e.g. 
virtual size: 40G (42949672960 bytes) disk_size = int(ret[ret.index(':')+1:ret.index('G ')].strip()) return (disk_image_path, image_present, disk_size) else: return (None, False, 0) def launch_existing_vm_image(vm_details): """ Launch existing VM image - Choose new private_ip & mac_addr if not provided - Get location for VM image - Launch VM on given host - Attach extra disk to VM if defined - Create mapping between public IP and private IP if required """ logger.debug('Launch existing VM image') vm_properties = {} vm_properties['ram'] = vm_details.RAM vm_properties['vcpus'] = vm_details.vCPU vm_properties['security_domain'] = vm_details.security_domain #If Private IP was already chosen previously and DHCP entry is done if vm_details.private_ip != None: private_ip_info = current.db.private_ip_pool[vm_details.private_ip] if private_ip_info: vm_properties['private_ip'] = private_ip_info.private_ip vm_properties['mac_addr'] = private_ip_info.mac_addr vm_properties['vlan_name'] = private_ip_info.vlan.name vm_properties['vlan_tag'] = private_ip_info.vlan.vlan_tag if vm_details.public_ip == None: vm_properties['public_ip_req'] = False else: vm_properties['public_ip_req'] = True if vm_details.public_ip.is_active: vm_properties['public_ip'] = vm_details.public_ip.public_ip _choose_mac_ip_vncport(vm_properties) vm_properties['template'] = current.db.template[vm_details.template_id] vm_properties['datastore'] = current.db.datastore[vm_details.datastore_id] vm_properties['host'] = find_new_host(vm_details.RAM, vm_details.vCPU) (vm_image_name, image_present) = get_vm_image_location(vm_details.datastore_id, vm_details.vm_identity) if image_present: launch_vm_on_host(vm_details, vm_image_name, vm_properties) #Check if extra disk needs to be attached attached_disks = current.db((current.db.attached_disks.vm_id == vm_details.id)).select() if attached_disks: #Extra disk to be attached to the VM host_ip = current.db.host[vm_properties['host']].host_ip.private_ip disk_counter = 1 for 
attached_disk in attached_disks: disk_size = attach_disk(vm_details, attached_disk.attached_disk_name, host_ip, disk_counter, True) current.db(current.db.attached_disks.vm_id == attached_disk.vm_id and current.db.attached_disks.attached_disk_name==attached_disk.attached_disk_name ).update(capacity = disk_size) vm_details.extra_HDD += disk_size disk_counter += 1 #Create mapping of Private_IP and Public_IP if vm_properties['public_ip_req']: create_mapping(vm_properties['public_ip'], vm_properties['private_ip']) update_db_after_vm_installation(vm_details, vm_properties) def save_vm_as_template(parameters): """ Save VM as template If template for given VM already exists, replace with new template. """ logger.debug("Inside save_as_template() function") vm_id = parameters['vm_id'] vm_data = current.db.vm_data[vm_id] user_list = [] vm_details = current.db.vm_data[vm_id] logger.debug(str(vm_details)) try: (is_templated_created, new_template, old_template) = create_new_template(vm_details) if (is_templated_created): #remove old template if os.path.exists (old_template): os.remove(old_template) else: for user in current.db(current.db.user_vm_map.vm_id == vm_id).select(current.db.user_vm_map.user_id): user_list.append(user.user_id) new_template_id = current.db.template.insert(name = vm_data.vm_name + "_template" , os = vm_data.template_id.os , os_name = vm_data.template_id.os_name , os_version = vm_data.template_id.os_version , os_type = vm_data.template_id.os_type , arch = vm_data.template_id.arch , hdd = vm_data.template_id.hdd , hdfile = new_template , type = vm_data.template_id.type , tag = vm_data.vm_name + "_template" , datastore_id = vm_data.template_id.datastore_id, owner = user_list) current.db.vm_data[vm_id] = dict(saved_template = new_template_id) message = "User Template saved successfully" logger.debug(message) return (current.TASK_QUEUE_STATUS_SUCCESS, message) else: message = " Vm Template not saved " logger.debug("Task Status: %s " % message) return 
(current.TASK_QUEUE_STATUS_FAILED, message) except: logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (current.TASK_QUEUE_STATUS_FAILED, log_exception()) def delete_template(parameters): """ Delete template """ logger.debug("Inside delete_template() function") template_id = parameters['template_id'] template_details = current.db.template[template_id] template_path = template_details["hdfile"] if os.path.exists(template_path): os.remove(template_path) # set value in db also parent_vm = current.db.vm_data(saved_template = template_id) if parent_vm: parent_vm.update_record(saved_template = None) del current.db.template[template_id] return (current.TASK_QUEUE_STATUS_SUCCESS, "") def create_new_template(vm_details): """ Create a new template from the VM image - Create template directory - Copy VM Image to directory(Live copy if VM is running) - Update database to define new template """ try: (connection_object, domain) = getVirshDomainConn(vm_details) xmlfile = domain.XMLDesc(0) logger.debug("connection object created") datastore = _choose_datastore() logger.debug(datastore) new_template_dir = datastore.system_mount_point + '/' +get_constant('templates_dir') + '/' + vm_details.requester_id.first_name logger.debug("Creating user template directory...") if not os.path.exists (new_template_dir): os.makedirs(new_template_dir) template = new_template_dir + '/' + vm_details.vm_identity + '_template.qcow2' template_location = '/' + vm_details.requester_id.first_name + '/' + vm_details.vm_identity + '_template.qcow2' old_template = new_template_dir + '/' + vm_details.vm_identity + '_template_old.qcow2' if os.path.exists (template): # move template to some other path logger.debug("move template to some other file") shutil.move(template, old_template) logger.debug("template " + template) current_disk_path = vm_details.datastore_id.system_mount_point + get_constant('vms') + '/' + vm_details.vm_identity current_disk_file = current_disk_path + '/' + 
vm_details.vm_identity + '.qcow2' if (vm_details.status == current.VM_STATUS_RUNNING or vm_details.status == current.VM_STATUS_SUSPENDED): logger.debug("vm is active in db") if domain.isActive(): domain.undefine() root = etree.fromstring(xmlfile) target_elem = root.find("devices/disk/target") target_disk = target_elem.get('dev') flag = VIR_DOMAIN_BLOCK_REBASE_SHALLOW | VIR_DOMAIN_BLOCK_REBASE_COPY domain.blockRebase(target_disk, template, 0, flag) block_info_list = domain.blockJobInfo(current_disk_file,0) while(block_info_list['end'] != block_info_list['cur']): logger.debug("time to sleep") time.sleep(60) block_info_list = domain.blockJobInfo(current_disk_file,0) domain.blockJobAbort(current_disk_file) domain = connection_object.defineXML(xmlfile) connection_object.close() return (True, template_location, old_template) else: logger.debug("domain is not running on host") return (False, template_location, old_template) elif(vm_details.status == current.VM_STATUS_SHUTDOWN): if domain.isActive(): logger.debug("Domain is still active...Please try again after some time!!!") return (False, template_location, old_template) else: logger.debug("copying") copy_command = "cp "+current_disk_file+" "+template logger.debug("copy_command"+copy_command) #rc = os.system("cp %s %s" % (current_disk_file, template)) logger.debug("copy command running on " + vm_details.host_id.host_ip.private_ip + " host") command_output = execute_remote_cmd(vm_details.host_id.host_ip.private_ip, 'root', copy_command) logger.debug(command_output) return (True, template_location, old_template) except: if not domain.isPersistent(): domain = connection_object.defineXML(xmlfile) connection_object.close() logger.debug("Task Status: FAILED Error: %s " % log_exception()) return (False, template_location, old_template)
en
0.737981
# -*- coding: utf-8 -*- ################################################################################### # @UnusedWildImport Chooses datastore from a list of available datastores # datastore_capacity = current.db(current.db.datastore.id >= 0).select(orderby = current.db.datastore.used Returns resources utilization of a host in MB, Count Generic method to establish libvirt connection Generic method to establish libvirt connection Set the vlan tag in network configuration of VM This is required to ensure that VM fetches IP of its vlan from DHCP Chooses a random Private IP from the pool, such that: - It is not assigned to any VM or host - It belongs to VLAN of given security domain Chooses a random Public IP from the pool, such that: - It is not assigned to any VM - It is not assigned to any host - IP is marked active. Chooses mac address and ip address for a vm to be installed. It also chooses a random public IP if requested Chooses mac address, ip address and vncport for a vm to be installed Select a random host from list of 3 hosts with available RAM and CPU Availability is checked with 200 percent over-commitment. #Sort selected host list by Ram first then Cpu #If no suitable host found Allocates vm properties ( datastore, host, ip address, mac address, vnc port, ram, vcpus) Create a VM image - Creates a directory for the new VM using vm_identity - Find the location of template image requested for - Copy the template image from its location to new vm directory # Creates a directory for the new vm # Finds the location of template image that the user has requested for its vm. 
# Copies the template image from its location to new vm directory #template_dir = get_constant('vm_templates_datastore') Generates install command for vm Generates xml for defining new disk Create extra disk image Attach given disk to the VM #already_attached_disks = len(current.db(current.db.attached_disks.vm_id == vm.id).select()) # Attaching disk to vm using libvirt API Serves extra disk request and updates db Launches a vm image on host # Starts installing a vm # Serving HDD request Checks if a newly created vm is successfully defined Frees vm properties in-case installation has failed mid-way Updates db after a vm is installed successfully # Updating the used entry of datastore # Update vm_data table # @UnusedVariable # @UnusedVariable # @UnusedVariable #print key_s3_secret, key_s3_access , key_swift_secret # Installs a vm Installs a vm # Fetches vm details from vm_data table # Calling allocate_vm_properties function # Calling create_vm_image function # Calling launch_vm_on_host # Checking if vm has been installed successfully # Update database after vm installation Starts a vm Suspends a vm Resumes a vm Destroys a vm forcefully Destroys a vm gracefully Cleans up database after vm deletion # moving vm image folder to archives folder # removing hdd # updating the used entry of database # updating task_queue_event entry to remove reference of VM # deleting entry of extra disk of vm Checks if a vm has snapshot(s) Deletes a vm Migrate domain with snapshots # XML dump of snapshot(s) of the vm # Delete snapshot(s) of the vm and migrate it to destination host # Redefine all the snapshot(s) of the vm on the destination host and set current snapshot Delete directory created for storing dumpxml of vm snapshots Undo the migration # Redefine the snapshots of the vm on the source host # Delete directory created for storing dumpxml of vm snapshots Migrate domain # Delete directory created for storing dumpxml of vm snapshot Migrate VM domain from one datastore to another. 
- Copy VM Image to new datastore - Update VM XML definition - Update database # datastore_id = vm_details["datastore_id"] # # destxml = generate_blockcopy_xml(diskpath,target_disk) #undo_datastore_migration(vm_details, domain, diskpath, current_disk_file, vm_directory_path, datastore_id) Undo migration in case of any issue # undo database changes Migrates VM to new host Migrates VM to new datastore Snapshots a vm #Delete the existing Daily/Monthly/Yearly snapshot Reverts to snapshot Deletes a snapshot Get new IP for given security domain. Update the VM XML with new mac_address and update the information in DB # fetch new private IP from db from given security domain # update vm config to add new mac address. # update NAT IP mapping, if public IP present # update vm_data Edits vm configuration Get properties for Cloned VM. # Finds mac address, ip address and vnc port for the cloned vm # Template and host of parent vm # Creates a directory for the cloned vm # Creates a folder for additional disks of the cloned vm Migrates cloned vm to new host Clones vm Attaches extra disk to VM Get the file path for qcow2 image of a VM Get the file path for qcow2 image of the extra disk # Returns e.g. virtual size: 40G (42949672960 bytes) Launch existing VM image - Choose new private_ip & mac_addr if not provided - Get location for VM image - Launch VM on given host - Attach extra disk to VM if defined - Create mapping between public IP and private IP if required #If Private IP was already chosen previously and DHCP entry is done #Check if extra disk needs to be attached #Extra disk to be attached to the VM #Create mapping of Private_IP and Public_IP Save VM as template If template for given VM already exists, replace with new template. 
#remove old template Delete template # set value in db also Create a new template from the VM image - Create template directory - Copy VM Image to directory(Live copy if VM is running) - Update database to define new template # move template to some other path #rc = os.system("cp %s %s" % (current_disk_file, template))
2.006606
2
third_party/webrtc/src/chromium/src/build/android/devil/android/sdk/aapt.py
bopopescu/webrtc-streaming-node
8
9878
<reponame>bopopescu/webrtc-streaming-node # Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """This module wraps the Android Asset Packaging Tool.""" import os from devil.utils import cmd_helper from pylib import constants _AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt') def _RunAaptCmd(args): """Runs an aapt command. Args: args: A list of arguments for aapt. Returns: The output of the command. """ cmd = [_AAPT_PATH] + args status, output = cmd_helper.GetCmdStatusAndOutput(cmd) if status != 0: raise Exception('Failed running aapt command: "%s" with output "%s".' % (' '.join(cmd), output)) return output def Dump(what, apk, assets=None): """Returns the output of the aapt dump command. Args: what: What you want to dump. apk: Path to apk you want to dump information for. assets: List of assets in apk you want to dump information for. """ assets = assets or [] if isinstance(assets, basestring): assets = [assets] return _RunAaptCmd(['dump', what, apk] + assets).splitlines()
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """This module wraps the Android Asset Packaging Tool.""" import os from devil.utils import cmd_helper from pylib import constants _AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt') def _RunAaptCmd(args): """Runs an aapt command. Args: args: A list of arguments for aapt. Returns: The output of the command. """ cmd = [_AAPT_PATH] + args status, output = cmd_helper.GetCmdStatusAndOutput(cmd) if status != 0: raise Exception('Failed running aapt command: "%s" with output "%s".' % (' '.join(cmd), output)) return output def Dump(what, apk, assets=None): """Returns the output of the aapt dump command. Args: what: What you want to dump. apk: Path to apk you want to dump information for. assets: List of assets in apk you want to dump information for. """ assets = assets or [] if isinstance(assets, basestring): assets = [assets] return _RunAaptCmd(['dump', what, apk] + assets).splitlines()
en
0.81449
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. This module wraps the Android Asset Packaging Tool. Runs an aapt command. Args: args: A list of arguments for aapt. Returns: The output of the command. Returns the output of the aapt dump command. Args: what: What you want to dump. apk: Path to apk you want to dump information for. assets: List of assets in apk you want to dump information for.
2.458751
2
examples/Tests/Misc/Resources/PythonFile/basic.py
esayui/mworks
0
9879
<reponame>esayui/mworks setvar('nsamples', getvar('a') + getvar('b'))
setvar('nsamples', getvar('a') + getvar('b'))
none
1
1.063795
1
quacc/recipes/psi4/core.py
arosen93/HT-ASE
9
9880
<filename>quacc/recipes/psi4/core.py """Core recipes for Psi4""" from __future__ import annotations from dataclasses import dataclass from typing import Any, Dict from ase.atoms import Atoms from ase.calculators.psi4 import Psi4 from jobflow import Maker, job from monty.dev import requires try: import psi4 except: psi4 = None from quacc.schemas.calc import summarize_run from quacc.util.basics import merge_dicts from quacc.util.calc import run_calc @dataclass class StaticMaker(Maker): """ Class to carry out a single-point calculation. Parameters ---------- name Name of the job. method The level of theory to use. basis Basis set swaps Dictionary of custom kwargs for the calculator. """ name: str = "Psi4-Static" method: str = "wb97x-v" basis: str = "def2-tzvp" swaps: Dict[str, Any] = None @job @requires(psi4, "Psi4 be installed. Try conda install -c psi4 psi4") def make( self, atoms: Atoms, charge: int = None, mult: int = None ) -> Dict[str, Any]: """ Make the run. Parameters ---------- atoms .Atoms object` charge Charge of the system. If None, this is determined from the sum of atoms.get_initial_charges(). mult Multiplicity of the system. If None, this is determined from 1+ the sum of atoms.get_initial_magnetic_moments(). Returns ------- Dict Summary of the run. """ swaps = self.swaps or {} defaults = { "mem": "16GB", "num_threads": "max", "method": self.method, "basis": self.basis, "charge": charge if charge else round(sum(atoms.get_initial_charges())), "multiplicity": mult if mult else round(1 + sum(atoms.get_initial_magnetic_moments())), } flags = merge_dicts(defaults, swaps, remove_none=True) atoms.calc = Psi4(**flags) new_atoms = run_calc(atoms) summary = summarize_run( new_atoms, input_atoms=atoms, additional_fields={"name": self.name} ) return summary
<filename>quacc/recipes/psi4/core.py """Core recipes for Psi4""" from __future__ import annotations from dataclasses import dataclass from typing import Any, Dict from ase.atoms import Atoms from ase.calculators.psi4 import Psi4 from jobflow import Maker, job from monty.dev import requires try: import psi4 except: psi4 = None from quacc.schemas.calc import summarize_run from quacc.util.basics import merge_dicts from quacc.util.calc import run_calc @dataclass class StaticMaker(Maker): """ Class to carry out a single-point calculation. Parameters ---------- name Name of the job. method The level of theory to use. basis Basis set swaps Dictionary of custom kwargs for the calculator. """ name: str = "Psi4-Static" method: str = "wb97x-v" basis: str = "def2-tzvp" swaps: Dict[str, Any] = None @job @requires(psi4, "Psi4 be installed. Try conda install -c psi4 psi4") def make( self, atoms: Atoms, charge: int = None, mult: int = None ) -> Dict[str, Any]: """ Make the run. Parameters ---------- atoms .Atoms object` charge Charge of the system. If None, this is determined from the sum of atoms.get_initial_charges(). mult Multiplicity of the system. If None, this is determined from 1+ the sum of atoms.get_initial_magnetic_moments(). Returns ------- Dict Summary of the run. """ swaps = self.swaps or {} defaults = { "mem": "16GB", "num_threads": "max", "method": self.method, "basis": self.basis, "charge": charge if charge else round(sum(atoms.get_initial_charges())), "multiplicity": mult if mult else round(1 + sum(atoms.get_initial_magnetic_moments())), } flags = merge_dicts(defaults, swaps, remove_none=True) atoms.calc = Psi4(**flags) new_atoms = run_calc(atoms) summary = summarize_run( new_atoms, input_atoms=atoms, additional_fields={"name": self.name} ) return summary
en
0.720651
Core recipes for Psi4 Class to carry out a single-point calculation. Parameters ---------- name Name of the job. method The level of theory to use. basis Basis set swaps Dictionary of custom kwargs for the calculator. Make the run. Parameters ---------- atoms .Atoms object` charge Charge of the system. If None, this is determined from the sum of atoms.get_initial_charges(). mult Multiplicity of the system. If None, this is determined from 1+ the sum of atoms.get_initial_magnetic_moments(). Returns ------- Dict Summary of the run.
2.075814
2
UAS/UAS 11 & 12/main.py
Archedar/UAS
0
9881
#Main Program from Class import Barang import Menu histori = list() listBarang = [ Barang('Rinso', 5000, 20), Barang('Sabun', 3000, 20), Barang('Pulpen', 2500, 20), Barang('Tisu', 10000, 20), Barang('Penggaris', 1000, 20) ] while True: print(''' Menu 1. Tampilkan Barang 2. Tambahkan Barang 3. Tambah Stock Barang 4. Hapus Barang 5. Cari Barang Berdasarkan Keyword 6. Hitung Barang Belanjaan 7. Histori Keluar Masuk Barang 0. Keluar Program ''') choice = input('Masukan No Menu: ') if choice == '1': Menu.menu1(listBarang) elif choice == '2': Menu.menu2(listBarang, histori) elif choice == '3': Menu.menu3(listBarang, histori) elif choice == '4': Menu.menu4(listBarang, histori) elif choice == '5': Menu.menu5(listBarang) elif choice == '6': Menu.menu6(listBarang, histori) elif choice == '7': Menu.menu7(histori) elif choice == '0': print('Keluar Program') break else: print('Invalid Input!')
#Main Program from Class import Barang import Menu histori = list() listBarang = [ Barang('Rinso', 5000, 20), Barang('Sabun', 3000, 20), Barang('Pulpen', 2500, 20), Barang('Tisu', 10000, 20), Barang('Penggaris', 1000, 20) ] while True: print(''' Menu 1. Tampilkan Barang 2. Tambahkan Barang 3. Tambah Stock Barang 4. Hapus Barang 5. Cari Barang Berdasarkan Keyword 6. Hitung Barang Belanjaan 7. Histori Keluar Masuk Barang 0. Keluar Program ''') choice = input('Masukan No Menu: ') if choice == '1': Menu.menu1(listBarang) elif choice == '2': Menu.menu2(listBarang, histori) elif choice == '3': Menu.menu3(listBarang, histori) elif choice == '4': Menu.menu4(listBarang, histori) elif choice == '5': Menu.menu5(listBarang) elif choice == '6': Menu.menu6(listBarang, histori) elif choice == '7': Menu.menu7(histori) elif choice == '0': print('Keluar Program') break else: print('Invalid Input!')
id
0.354467
#Main Program Menu 1. Tampilkan Barang 2. Tambahkan Barang 3. Tambah Stock Barang 4. Hapus Barang 5. Cari Barang Berdasarkan Keyword 6. Hitung Barang Belanjaan 7. Histori Keluar Masuk Barang 0. Keluar Program
3.72465
4
original/baselines/train/JointE+ONE.py
thunlp/JointNRE
186
9882
#coding:utf-8 import numpy as np import tensorflow as tf import os import time import datetime import ctypes import threading import json ll1 = ctypes.cdll.LoadLibrary lib_cnn = ll1("./init_cnn.so") ll2 = ctypes.cdll.LoadLibrary lib_kg = ll2("./init_know.so") class Config(object): def __init__(self): self.instanceTot = lib_cnn.getInstanceTot() self.sequence_size = lib_cnn.getLenLimit() self.num_classes = lib_cnn.getRelationTotal() self.num_words = lib_cnn.getWordTotal() self.num_positions = 2 * lib_cnn.getPositionLimit() + 1 self.word_size = lib_cnn.getWordDimension() self.position_size = 5 self.embedding_size = self.word_size + self.position_size * 2 self.filter_size = 3 self.num_filters = 230 self.relation_size = self.word_size#230 self.dropout_keep_prob = 0.5 self.l2_lambda = 0.0001 self.NA = 51 lib_cnn.setNA(self.NA) lib_cnn.setRate(3) self.margin = 1.0 self.nbatches = 100 self.trainTimes = 15 self.entityTotal = 0 self.relationTotal = 0 class Model(object): def __init__(self, config): sequence_size = config.sequence_size num_classes = config.num_classes num_words = config.num_words num_positions = config.num_positions embedding_size = config.embedding_size word_size = config.word_size position_size = config.position_size relation_size = config.relation_size filter_size = config.filter_size num_filters = config.num_filters dropout_keep_prob = config.dropout_keep_prob margin = config.margin l2_lambda = config.l2_lambda self.input_x = tf.placeholder(tf.int32, [None, sequence_size], name = "input_x") self.input_p_h = tf.placeholder(tf.int32, [None, sequence_size], name = "input_p_h") self.input_p_t = tf.placeholder(tf.int32, [None, sequence_size], name = "input_p_t") self.input_r = tf.placeholder(tf.float32, [1, 1], name = "input_r") self.input_r_n = tf.placeholder(tf.float32, [1, 1], name = "input_r_n") self.input_h = tf.placeholder(tf.int32, [1, 1], name = "input_h") self.input_t = tf.placeholder(tf.int32, [1, 1], name = "input_t") self.input_y = 
tf.placeholder(tf.float32, [1, num_classes], name = "input_y") self.pos_h = tf.placeholder(tf.int32, [None]) self.pos_t = tf.placeholder(tf.int32, [None]) self.pos_r = tf.placeholder(tf.int32, [None]) self.neg_h = tf.placeholder(tf.int32, [None]) self.neg_t = tf.placeholder(tf.int32, [None]) self.neg_r = tf.placeholder(tf.int32, [None]) l2_loss = tf.constant(0.0) with tf.name_scope("embedding-lookup"): self.word_embeddings = tf.Variable(word_embeddings, name="word_embeddings") self.relation_embeddings = tf.get_variable("relation_embeddings", [config.relationTotal, word_size]) self.position_embeddings = tf.get_variable("position_embeddings", [num_positions, position_size]) self.relation_attention = tf.get_variable("relation_attention", [num_classes, relation_size]) self.NAattention = tf.get_variable("NAattention", [relation_size, 1]) self.attention = tf.get_variable("attention", [num_filters, relation_size]) #know pos_h_e = tf.nn.embedding_lookup(self.word_embeddings, self.pos_h) pos_t_e = tf.nn.embedding_lookup(self.word_embeddings, self.pos_t) pos_r_e = tf.nn.embedding_lookup(self.relation_embeddings, self.pos_r) neg_h_e = tf.nn.embedding_lookup(self.word_embeddings, self.neg_h) neg_t_e = tf.nn.embedding_lookup(self.word_embeddings, self.neg_t) neg_r_e = tf.nn.embedding_lookup(self.relation_embeddings, self.neg_r) #cnn self.x_initial = tf.nn.embedding_lookup(self.word_embeddings, self.input_x) self.x_p_h = tf.nn.embedding_lookup(self.position_embeddings, self.input_p_h) self.x_p_t = tf.nn.embedding_lookup(self.position_embeddings, self.input_p_t) self.x = tf.expand_dims(tf.concat(2, [self.x_initial, self.x_p_h, self.x_p_t]), -1) self.head = tf.nn.embedding_lookup(self.word_embeddings, self.input_h) self.tail = tf.nn.embedding_lookup(self.word_embeddings, self.input_t) l2_loss += tf.nn.l2_loss(self.attention) with tf.name_scope("conv-maxpool"): self.W = tf.get_variable("W", [filter_size, embedding_size, 1, num_filters]) self.b = tf.get_variable("b", [num_filters]) 
conv = tf.nn.conv2d(self.x, self.W, strides=[1, 1, 1, 1], padding="VALID", name="conv") h = tf.nn.tanh(tf.nn.bias_add(conv, self.b), name="tanh") self.y = tf.nn.max_pool(h, ksize=[1, sequence_size - filter_size + 1, 1, 1], strides=[1, 1, 1, 1], padding='VALID', name="pool") l2_loss += tf.nn.l2_loss(self.W) l2_loss += tf.nn.l2_loss(self.b) self.y = tf.reshape(self.y, [-1, num_filters]) with tf.name_scope('attention'): self.y_attention = tf.reduce_max(self.y, 0 , keep_dims = True) with tf.name_scope("dropout"): self.y_attention = tf.nn.l2_normalize(self.y_attention, 1) self.h_drop = tf.nn.dropout(self.y_attention, dropout_keep_prob) self.transfer_w = tf.get_variable("transfer_w", [num_filters, num_classes]) self.scores = tf.matmul(self.h_drop, self.transfer_w) l2_loss += tf.nn.l2_loss(self.transfer_w) with tf.name_scope("loss"): cross_entropy = tf.nn.softmax_cross_entropy_with_logits(self.scores, self.input_y) self.loss_cnn = tf.reduce_mean(cross_entropy) + l2_lambda * l2_loss pos = tf.reduce_sum(abs(pos_h_e + pos_r_e - pos_t_e), 1, keep_dims = True) neg = tf.reduce_sum(abs(neg_h_e + neg_r_e - neg_t_e), 1, keep_dims = True) self.loss_kg = tf.reduce_sum(tf.maximum(pos - neg + margin, 0)) with tf.name_scope("accuracy"): self.predictions = tf.argmax(self.scores, 1, name="predictions") correct_predictions = tf.equal(self.predictions, tf.argmax(self.input_y, 1)) self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy") bags_sum = 0.0 bags_hit_NA = 0.0 sum_NA = 0.0 sum_fNA = 0.0 bags_hit = 0.0 loss_sum = 0.0 if __name__ == "__main__": lib_cnn.readWordVec() lib_cnn.readFromFile() lib_kg.init() np.random.seed(0) tf.set_random_seed(0) config = Config() word_embeddings = np.zeros(config.num_words * config.word_size, dtype = np.float32) lib_cnn.getWordVec.argtypes = [ctypes.c_void_p] lib_cnn.getWordVec(word_embeddings.__array_interface__['data'][0]) word_embeddings.resize((config.num_words,config.word_size)) config.batch_size = 
lib_kg.getTripleTotal() / config.nbatches config.entityTotal = lib_kg.getEntityTotal() config.relationTotal = lib_kg.getRelationTotal() with tf.Graph().as_default(): conf = tf.ConfigProto() sess = tf.Session(config=conf) with sess.as_default(): initializer = tf.contrib.layers.xavier_initializer() with tf.variable_scope("model", reuse=None, initializer = initializer): m = Model(config = config) global_step_cnn = tf.Variable(0, name="global_step_cnn", trainable=False) optimizer_cnn = tf.train.GradientDescentOptimizer(0.01) grads_and_vars_cnn = optimizer_cnn.compute_gradients(m.loss_cnn) train_op_cnn = optimizer_cnn.apply_gradients(grads_and_vars_cnn, global_step = global_step_cnn) global_step_kg = tf.Variable(0, name="global_step_kg", trainable=False) optimizer_kg = tf.train.GradientDescentOptimizer(0.001) grads_and_vars_kg = optimizer_kg.compute_gradients(m.loss_kg) train_op_kg = optimizer_kg.apply_gradients(grads_and_vars_kg, global_step=global_step_kg) sess.run(tf.initialize_all_variables()) def outEmbedding(str1): word_embeddings, relation_embeddings, position_embeddings, relation_attention, attention, W, B, transfer_w, transfer_b, softmax_w, softmax_b = sess.run([m.word_embeddings, m.relation_embeddings, m.position_embeddings, m.relation_attention, m.attention, m.W, m.b, m.transfer_w, m.transfer_b, m.softmax_w, m.softmax_b]) log = open("log"+str1+".txt", "w") log.write(json.dumps(word_embeddings.tolist())+"\n") log.write(json.dumps(relation_embeddings.tolist())+"\n") log.write(json.dumps(position_embeddings.tolist())+"\n") log.write(json.dumps(relation_attention.tolist())+"\n") log.write(json.dumps(attention.tolist())+"\n") log.write(json.dumps(W.tolist())+"\n") log.write(json.dumps(B.tolist())+"\n") log.write(json.dumps(transfer_w.tolist())+"\n") NAattention = sess.run(m.NAattention) log.write(json.dumps(NAattention.tolist()) + "\n") log.close() x_batch = np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32) p_t_batch = 
np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32) p_h_batch = np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32) r_batch = np.zeros((1, 1), dtype = np.int32) y_batch = np.zeros((1, config.num_classes), dtype = np.int32) r_n_batch = np.zeros((1, 1), dtype = np.float32) h_batch = np.zeros((1, 1), dtype = np.int32) t_batch = np.zeros((1, 1), dtype = np.int32) x_batch_addr = x_batch.__array_interface__['data'][0] p_t_batch_addr = p_t_batch.__array_interface__['data'][0] p_h_batch_addr = p_h_batch.__array_interface__['data'][0] y_batch_addr = y_batch.__array_interface__['data'][0] r_batch_addr = r_batch.__array_interface__['data'][0] r_n_batch_addr = r_n_batch.__array_interface__['data'][0] h_batch_addr = h_batch.__array_interface__['data'][0] t_batch_addr = t_batch.__array_interface__['data'][0] lib_cnn.batch_iter.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] tipTotal = lib_cnn.getTipTotal() loop = 0 def train_cnn(coord): def train_step_cnn(x_batch, p_h_batch, p_t_batch, y_batch, r_batch, r_n_batch, h_batch, t_batch): global bags_sum, bags_hit, loss_sum, bags_hit_NA, bags_hit, sum_fNA, sum_NA feed_dict = { m.input_x: x_batch, m.input_p_h: p_h_batch, m.input_p_t: p_t_batch, m.input_r: r_batch, m.input_r_n: r_n_batch, m.input_y: y_batch, m.input_h: h_batch, m.input_t: t_batch } _, step, loss, accuracy = sess.run( [train_op_cnn, global_step_cnn, m.loss_cnn, m.accuracy], feed_dict) time_str = datetime.datetime.now().isoformat() loss_sum += loss bags_sum += 1 if (r_batch[0]!=config.NA): sum_fNA += 1 if accuracy > 0.5: bags_hit += 1.0 else: sum_NA += 1 if accuracy > 0.5: bags_hit_NA += 1.0 if bags_sum % 1000 == 0: if (sum_NA == 0): sum_NA+=1 if (sum_fNA == 0): sum_fNA+=1 print("{}: step {}, loss {:g}, acc {:g} acc {:g} {} {}".format(time_str, step, loss_sum/bags_sum, bags_hit_NA/sum_NA, bags_hit/sum_fNA, sum_NA, sum_fNA)) global loop 
while not coord.should_stop(): print 'Looping ', loop outEmbedding(str(loop)) for i in range(tipTotal): length = lib_cnn.batch_iter(x_batch_addr, p_h_batch_addr, p_t_batch_addr, y_batch_addr, r_batch_addr, r_n_batch_addr, h_batch_addr, t_batch_addr) train_step_cnn(x_batch[0:length,], p_h_batch[0:length,], p_t_batch[0:length,], y_batch, r_batch, r_n_batch, h_batch, t_batch) global bags_sum, bags_hit, loss_sum, bags_hit_NA, bags_hit, sum_fNA, sum_NA bags_sum = 0 bags_hit = 0 bags_hit_NA = 0 loss_sum = 0 sum_fNA = 0 sum_NA = 0 loop += 1 if loop == config.trainTimes: coord.request_stop() ph = np.zeros(config.batch_size * 2, dtype = np.int32) pt = np.zeros(config.batch_size * 2, dtype = np.int32) pr = np.zeros(config.batch_size * 2, dtype = np.int32) nh = np.zeros(config.batch_size * 2, dtype = np.int32) nt = np.zeros(config.batch_size * 2, dtype = np.int32) nr = np.zeros(config.batch_size * 2, dtype = np.int32) ph_addr = ph.__array_interface__['data'][0] pt_addr = pt.__array_interface__['data'][0] pr_addr = pr.__array_interface__['data'][0] nh_addr = nh.__array_interface__['data'][0] nt_addr = nt.__array_interface__['data'][0] nr_addr = nr.__array_interface__['data'][0] lib_kg.getBatch.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int] times_kg = 0 def train_kg(coord): def train_step_kg(pos_h_batch, pos_t_batch, pos_r_batch, neg_h_batch, neg_t_batch, neg_r_batch): feed_dict = { m.pos_h: pos_h_batch, m.pos_t: pos_t_batch, m.pos_r: pos_r_batch, m.neg_h: neg_h_batch, m.neg_t: neg_t_batch, m.neg_r: neg_r_batch } _, step, loss = sess.run( [train_op_kg, global_step_kg, m.loss_kg], feed_dict) return loss global times_kg while not coord.should_stop(): times_kg += 1 res = 0.0 for batch in range(config.nbatches): lib_kg.getBatch(ph_addr, pt_addr, pr_addr, nh_addr, nt_addr, nr_addr, config.batch_size) res += train_step_kg(ph, pt, pr, nh, nt, nr) coord = tf.train.Coordinator() threads = [] 
threads.append(threading.Thread(target=train_kg, args=(coord,))) threads.append(threading.Thread(target=train_cnn, args=(coord,))) for t in threads: t.start() coord.join(threads)
#coding:utf-8 import numpy as np import tensorflow as tf import os import time import datetime import ctypes import threading import json ll1 = ctypes.cdll.LoadLibrary lib_cnn = ll1("./init_cnn.so") ll2 = ctypes.cdll.LoadLibrary lib_kg = ll2("./init_know.so") class Config(object): def __init__(self): self.instanceTot = lib_cnn.getInstanceTot() self.sequence_size = lib_cnn.getLenLimit() self.num_classes = lib_cnn.getRelationTotal() self.num_words = lib_cnn.getWordTotal() self.num_positions = 2 * lib_cnn.getPositionLimit() + 1 self.word_size = lib_cnn.getWordDimension() self.position_size = 5 self.embedding_size = self.word_size + self.position_size * 2 self.filter_size = 3 self.num_filters = 230 self.relation_size = self.word_size#230 self.dropout_keep_prob = 0.5 self.l2_lambda = 0.0001 self.NA = 51 lib_cnn.setNA(self.NA) lib_cnn.setRate(3) self.margin = 1.0 self.nbatches = 100 self.trainTimes = 15 self.entityTotal = 0 self.relationTotal = 0 class Model(object): def __init__(self, config): sequence_size = config.sequence_size num_classes = config.num_classes num_words = config.num_words num_positions = config.num_positions embedding_size = config.embedding_size word_size = config.word_size position_size = config.position_size relation_size = config.relation_size filter_size = config.filter_size num_filters = config.num_filters dropout_keep_prob = config.dropout_keep_prob margin = config.margin l2_lambda = config.l2_lambda self.input_x = tf.placeholder(tf.int32, [None, sequence_size], name = "input_x") self.input_p_h = tf.placeholder(tf.int32, [None, sequence_size], name = "input_p_h") self.input_p_t = tf.placeholder(tf.int32, [None, sequence_size], name = "input_p_t") self.input_r = tf.placeholder(tf.float32, [1, 1], name = "input_r") self.input_r_n = tf.placeholder(tf.float32, [1, 1], name = "input_r_n") self.input_h = tf.placeholder(tf.int32, [1, 1], name = "input_h") self.input_t = tf.placeholder(tf.int32, [1, 1], name = "input_t") self.input_y = 
tf.placeholder(tf.float32, [1, num_classes], name = "input_y") self.pos_h = tf.placeholder(tf.int32, [None]) self.pos_t = tf.placeholder(tf.int32, [None]) self.pos_r = tf.placeholder(tf.int32, [None]) self.neg_h = tf.placeholder(tf.int32, [None]) self.neg_t = tf.placeholder(tf.int32, [None]) self.neg_r = tf.placeholder(tf.int32, [None]) l2_loss = tf.constant(0.0) with tf.name_scope("embedding-lookup"): self.word_embeddings = tf.Variable(word_embeddings, name="word_embeddings") self.relation_embeddings = tf.get_variable("relation_embeddings", [config.relationTotal, word_size]) self.position_embeddings = tf.get_variable("position_embeddings", [num_positions, position_size]) self.relation_attention = tf.get_variable("relation_attention", [num_classes, relation_size]) self.NAattention = tf.get_variable("NAattention", [relation_size, 1]) self.attention = tf.get_variable("attention", [num_filters, relation_size]) #know pos_h_e = tf.nn.embedding_lookup(self.word_embeddings, self.pos_h) pos_t_e = tf.nn.embedding_lookup(self.word_embeddings, self.pos_t) pos_r_e = tf.nn.embedding_lookup(self.relation_embeddings, self.pos_r) neg_h_e = tf.nn.embedding_lookup(self.word_embeddings, self.neg_h) neg_t_e = tf.nn.embedding_lookup(self.word_embeddings, self.neg_t) neg_r_e = tf.nn.embedding_lookup(self.relation_embeddings, self.neg_r) #cnn self.x_initial = tf.nn.embedding_lookup(self.word_embeddings, self.input_x) self.x_p_h = tf.nn.embedding_lookup(self.position_embeddings, self.input_p_h) self.x_p_t = tf.nn.embedding_lookup(self.position_embeddings, self.input_p_t) self.x = tf.expand_dims(tf.concat(2, [self.x_initial, self.x_p_h, self.x_p_t]), -1) self.head = tf.nn.embedding_lookup(self.word_embeddings, self.input_h) self.tail = tf.nn.embedding_lookup(self.word_embeddings, self.input_t) l2_loss += tf.nn.l2_loss(self.attention) with tf.name_scope("conv-maxpool"): self.W = tf.get_variable("W", [filter_size, embedding_size, 1, num_filters]) self.b = tf.get_variable("b", [num_filters]) 
conv = tf.nn.conv2d(self.x, self.W, strides=[1, 1, 1, 1], padding="VALID", name="conv") h = tf.nn.tanh(tf.nn.bias_add(conv, self.b), name="tanh") self.y = tf.nn.max_pool(h, ksize=[1, sequence_size - filter_size + 1, 1, 1], strides=[1, 1, 1, 1], padding='VALID', name="pool") l2_loss += tf.nn.l2_loss(self.W) l2_loss += tf.nn.l2_loss(self.b) self.y = tf.reshape(self.y, [-1, num_filters]) with tf.name_scope('attention'): self.y_attention = tf.reduce_max(self.y, 0 , keep_dims = True) with tf.name_scope("dropout"): self.y_attention = tf.nn.l2_normalize(self.y_attention, 1) self.h_drop = tf.nn.dropout(self.y_attention, dropout_keep_prob) self.transfer_w = tf.get_variable("transfer_w", [num_filters, num_classes]) self.scores = tf.matmul(self.h_drop, self.transfer_w) l2_loss += tf.nn.l2_loss(self.transfer_w) with tf.name_scope("loss"): cross_entropy = tf.nn.softmax_cross_entropy_with_logits(self.scores, self.input_y) self.loss_cnn = tf.reduce_mean(cross_entropy) + l2_lambda * l2_loss pos = tf.reduce_sum(abs(pos_h_e + pos_r_e - pos_t_e), 1, keep_dims = True) neg = tf.reduce_sum(abs(neg_h_e + neg_r_e - neg_t_e), 1, keep_dims = True) self.loss_kg = tf.reduce_sum(tf.maximum(pos - neg + margin, 0)) with tf.name_scope("accuracy"): self.predictions = tf.argmax(self.scores, 1, name="predictions") correct_predictions = tf.equal(self.predictions, tf.argmax(self.input_y, 1)) self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy") bags_sum = 0.0 bags_hit_NA = 0.0 sum_NA = 0.0 sum_fNA = 0.0 bags_hit = 0.0 loss_sum = 0.0 if __name__ == "__main__": lib_cnn.readWordVec() lib_cnn.readFromFile() lib_kg.init() np.random.seed(0) tf.set_random_seed(0) config = Config() word_embeddings = np.zeros(config.num_words * config.word_size, dtype = np.float32) lib_cnn.getWordVec.argtypes = [ctypes.c_void_p] lib_cnn.getWordVec(word_embeddings.__array_interface__['data'][0]) word_embeddings.resize((config.num_words,config.word_size)) config.batch_size = 
lib_kg.getTripleTotal() / config.nbatches config.entityTotal = lib_kg.getEntityTotal() config.relationTotal = lib_kg.getRelationTotal() with tf.Graph().as_default(): conf = tf.ConfigProto() sess = tf.Session(config=conf) with sess.as_default(): initializer = tf.contrib.layers.xavier_initializer() with tf.variable_scope("model", reuse=None, initializer = initializer): m = Model(config = config) global_step_cnn = tf.Variable(0, name="global_step_cnn", trainable=False) optimizer_cnn = tf.train.GradientDescentOptimizer(0.01) grads_and_vars_cnn = optimizer_cnn.compute_gradients(m.loss_cnn) train_op_cnn = optimizer_cnn.apply_gradients(grads_and_vars_cnn, global_step = global_step_cnn) global_step_kg = tf.Variable(0, name="global_step_kg", trainable=False) optimizer_kg = tf.train.GradientDescentOptimizer(0.001) grads_and_vars_kg = optimizer_kg.compute_gradients(m.loss_kg) train_op_kg = optimizer_kg.apply_gradients(grads_and_vars_kg, global_step=global_step_kg) sess.run(tf.initialize_all_variables()) def outEmbedding(str1): word_embeddings, relation_embeddings, position_embeddings, relation_attention, attention, W, B, transfer_w, transfer_b, softmax_w, softmax_b = sess.run([m.word_embeddings, m.relation_embeddings, m.position_embeddings, m.relation_attention, m.attention, m.W, m.b, m.transfer_w, m.transfer_b, m.softmax_w, m.softmax_b]) log = open("log"+str1+".txt", "w") log.write(json.dumps(word_embeddings.tolist())+"\n") log.write(json.dumps(relation_embeddings.tolist())+"\n") log.write(json.dumps(position_embeddings.tolist())+"\n") log.write(json.dumps(relation_attention.tolist())+"\n") log.write(json.dumps(attention.tolist())+"\n") log.write(json.dumps(W.tolist())+"\n") log.write(json.dumps(B.tolist())+"\n") log.write(json.dumps(transfer_w.tolist())+"\n") NAattention = sess.run(m.NAattention) log.write(json.dumps(NAattention.tolist()) + "\n") log.close() x_batch = np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32) p_t_batch = 
np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32) p_h_batch = np.zeros((config.instanceTot,config.sequence_size), dtype = np.int32) r_batch = np.zeros((1, 1), dtype = np.int32) y_batch = np.zeros((1, config.num_classes), dtype = np.int32) r_n_batch = np.zeros((1, 1), dtype = np.float32) h_batch = np.zeros((1, 1), dtype = np.int32) t_batch = np.zeros((1, 1), dtype = np.int32) x_batch_addr = x_batch.__array_interface__['data'][0] p_t_batch_addr = p_t_batch.__array_interface__['data'][0] p_h_batch_addr = p_h_batch.__array_interface__['data'][0] y_batch_addr = y_batch.__array_interface__['data'][0] r_batch_addr = r_batch.__array_interface__['data'][0] r_n_batch_addr = r_n_batch.__array_interface__['data'][0] h_batch_addr = h_batch.__array_interface__['data'][0] t_batch_addr = t_batch.__array_interface__['data'][0] lib_cnn.batch_iter.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] tipTotal = lib_cnn.getTipTotal() loop = 0 def train_cnn(coord): def train_step_cnn(x_batch, p_h_batch, p_t_batch, y_batch, r_batch, r_n_batch, h_batch, t_batch): global bags_sum, bags_hit, loss_sum, bags_hit_NA, bags_hit, sum_fNA, sum_NA feed_dict = { m.input_x: x_batch, m.input_p_h: p_h_batch, m.input_p_t: p_t_batch, m.input_r: r_batch, m.input_r_n: r_n_batch, m.input_y: y_batch, m.input_h: h_batch, m.input_t: t_batch } _, step, loss, accuracy = sess.run( [train_op_cnn, global_step_cnn, m.loss_cnn, m.accuracy], feed_dict) time_str = datetime.datetime.now().isoformat() loss_sum += loss bags_sum += 1 if (r_batch[0]!=config.NA): sum_fNA += 1 if accuracy > 0.5: bags_hit += 1.0 else: sum_NA += 1 if accuracy > 0.5: bags_hit_NA += 1.0 if bags_sum % 1000 == 0: if (sum_NA == 0): sum_NA+=1 if (sum_fNA == 0): sum_fNA+=1 print("{}: step {}, loss {:g}, acc {:g} acc {:g} {} {}".format(time_str, step, loss_sum/bags_sum, bags_hit_NA/sum_NA, bags_hit/sum_fNA, sum_NA, sum_fNA)) global loop 
while not coord.should_stop(): print 'Looping ', loop outEmbedding(str(loop)) for i in range(tipTotal): length = lib_cnn.batch_iter(x_batch_addr, p_h_batch_addr, p_t_batch_addr, y_batch_addr, r_batch_addr, r_n_batch_addr, h_batch_addr, t_batch_addr) train_step_cnn(x_batch[0:length,], p_h_batch[0:length,], p_t_batch[0:length,], y_batch, r_batch, r_n_batch, h_batch, t_batch) global bags_sum, bags_hit, loss_sum, bags_hit_NA, bags_hit, sum_fNA, sum_NA bags_sum = 0 bags_hit = 0 bags_hit_NA = 0 loss_sum = 0 sum_fNA = 0 sum_NA = 0 loop += 1 if loop == config.trainTimes: coord.request_stop() ph = np.zeros(config.batch_size * 2, dtype = np.int32) pt = np.zeros(config.batch_size * 2, dtype = np.int32) pr = np.zeros(config.batch_size * 2, dtype = np.int32) nh = np.zeros(config.batch_size * 2, dtype = np.int32) nt = np.zeros(config.batch_size * 2, dtype = np.int32) nr = np.zeros(config.batch_size * 2, dtype = np.int32) ph_addr = ph.__array_interface__['data'][0] pt_addr = pt.__array_interface__['data'][0] pr_addr = pr.__array_interface__['data'][0] nh_addr = nh.__array_interface__['data'][0] nt_addr = nt.__array_interface__['data'][0] nr_addr = nr.__array_interface__['data'][0] lib_kg.getBatch.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int] times_kg = 0 def train_kg(coord): def train_step_kg(pos_h_batch, pos_t_batch, pos_r_batch, neg_h_batch, neg_t_batch, neg_r_batch): feed_dict = { m.pos_h: pos_h_batch, m.pos_t: pos_t_batch, m.pos_r: pos_r_batch, m.neg_h: neg_h_batch, m.neg_t: neg_t_batch, m.neg_r: neg_r_batch } _, step, loss = sess.run( [train_op_kg, global_step_kg, m.loss_kg], feed_dict) return loss global times_kg while not coord.should_stop(): times_kg += 1 res = 0.0 for batch in range(config.nbatches): lib_kg.getBatch(ph_addr, pt_addr, pr_addr, nh_addr, nt_addr, nr_addr, config.batch_size) res += train_step_kg(ph, pt, pr, nh, nt, nr) coord = tf.train.Coordinator() threads = [] 
threads.append(threading.Thread(target=train_kg, args=(coord,))) threads.append(threading.Thread(target=train_cnn, args=(coord,))) for t in threads: t.start() coord.join(threads)
en
0.771487
#coding:utf-8 #230 #know #cnn
2.180449
2
i2vec_cli/__main__.py
rachmadaniHaryono/i2vec_cli
0
9883
#!/usr/bin/env python3
"""get tag from http://demo.illustration2vec.net/."""
# note:
# - error 'ERROR: Request Entity Too Large' for file 1.1 mb
# <span style="color:red;">ERROR: Request Entity Too Large</span>
from collections import OrderedDict
from pathlib import Path
from pprint import pformat
import hashlib
import imghdr
import logging
import os
import shutil
import time
import urllib

import click
import peewee
import requests
import structlog
from PIL import Image

from i2vec_cli import models
from i2vec_cli.requests_session import Session, convert_raw_to_hydrus
from i2vec_cli.sha256 import sha256_checksum
from i2vec_cli.utils import user_data_dir, thumb_folder


def is_url(path):
    """Return True if path is url, False otherwise."""
    scheme = urllib.parse.urlparse(path).scheme
    return scheme in ('http', 'https')


def is_ext_equal(file_ext, imghdr_ext):
    """Compare a file extension with the type detected by imghdr.

    Args:
        file_ext: extension including the leading dot, e.g. ``'.jpg'``.
        imghdr_ext: type name from :func:`imghdr.what`, e.g. ``'jpeg'``.

    Returns:
        bool: True when the extension matches the detected type
        (treating ``.jpg``/``.jpeg`` as equal to ``jpeg``).
    """
    if not imghdr_ext:
        return False
    if file_ext.lower() == '.{}'.format(imghdr_ext):
        return True
    if file_ext.lower() in ('.jpg', '.jpeg') and imghdr_ext == 'jpeg':
        return True
    return False


def download(url, no_clobber):
    """Download url into the working directory.

    Args:
        url: URL to be downloaded.
        no_clobber: Skip download if file already exist.

    Returns:
        Downloaded filename (renamed to match the detected image type
        when the original extension disagrees with the file contents),
        or the existing file if `no_clobber` is `True`.
    """
    log = structlog.getLogger()
    basename = os.path.basename(url)
    if os.path.isfile(basename) and no_clobber:
        return basename
    response = requests.get(url, stream=True)
    with open(basename, 'wb') as out_file:
        shutil.copyfileobj(response.raw, out_file)
    name, ext = os.path.splitext(basename)
    imghdr_ext = imghdr.what(basename)
    ext_equal = is_ext_equal(file_ext=ext, imghdr_ext=imghdr_ext)
    if not imghdr_ext:
        # imghdr could not identify the file; keep the name as downloaded.
        log.debug("imghdr can't recognize file", file=basename)
        return basename
    if ext_equal:
        log.debug('Extension is equal', file_ext=ext, imghdr_ext=imghdr_ext)
        return basename
    # Extension disagrees with the detected type: rename to '<name>.<type>'.
    # (The original code had an unreachable third branch here; with
    # ext_equal being a bool, `if ext_equal` / `else` covers every case.)
    new_basename = '{}.{}'.format(name, imghdr_ext)
    new_basename_exist = os.path.isfile(new_basename)
    if new_basename_exist and not no_clobber:
        log.debug('Replace existing file', old=basename, new=new_basename)
        shutil.move(basename, new_basename)
    elif not new_basename_exist:
        log.debug('Rename file ext', file=basename, new_ext=imghdr_ext)
        shutil.move(basename, new_basename)
    else:
        log.debug('Not replace/rename file',
                  no_clobber=no_clobber, new_basename=new_basename)
    return new_basename


def validate_close_delay(ctx, param, value):
    """Click callback: validate --close-delay.

    Raises:
        click.BadParameter: when value is not an int or is below -1.
    """
    try:
        value = int(value)
    except Exception as e:
        raise click.BadParameter(
            'Error when validate close delay: value={}, error={}'.format(value, e))
    if value >= -1:
        return value
    raise click.BadParameter('Close delay have to be bigger or equal than -1')


def delay_close(close_delay):
    """Delay when closing the program.

    -1 waits for a keypress, 0 exits immediately, a positive value
    sleeps that many seconds.
    """
    log = structlog.getLogger()
    if close_delay == -1:
        click.pause()
    elif close_delay == 0:
        log.debug('No close delay')
    elif close_delay > 0:
        time.sleep(close_delay)
    else:
        log.error('Invalid close delay', v=close_delay)


def md5_checksum(fname):
    """Return the hex MD5 digest of file `fname`, read in 4 KiB chunks."""
    hash_md5 = hashlib.md5()
    with open(fname, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()


def create_thumbnail(path, thumb_path):
    """Create a 320x320-bounded JPEG thumbnail of `path` at `thumb_path`.

    Raises:
        IOError: when PIL cannot read the image or write the thumbnail.
    """
    size = 320, 320
    try:
        im = Image.open(path)
        im.thumbnail(size)
        im.save(thumb_path, "JPEG")
    except IOError:
        raise IOError("cannot create thumbnail for", path)


def get_print_result(path, db_path, format, session):
    """Get the tag prediction for `path`, formatted for printing.

    Results are cached in the database keyed on sha256/md5; a thumbnail
    is written under the user data dir as a side effect.
    """
    # BUGFIX: `log` was used below without ever being defined (NameError
    # on either exception path); define it like the other helpers do.
    log = structlog.getLogger()
    # compatibility
    p = path
    sha256 = sha256_checksum(p)
    md5 = md5_checksum(p)
    thumb_path = os.path.join(user_data_dir, 'thumb', '{}.jpg'.format(sha256))
    try:
        load_res = models.load_result(db=db_path, sha256=sha256, md5=md5)
    except models.Image.DoesNotExist:
        load_res = None
    if load_res:
        tags = {'prediction': load_res}
    else:
        tags = session.get_tags(path=p)
        try:
            models.save_result(
                db=db_path, sha256=sha256, md5=md5,
                prediction=tags['prediction'])
        except peewee.IntegrityError as e:
            log.debug(str(e))
        except KeyError:
            # BUGFIX: was `except keyError` (undefined name -> NameError).
            log.debug(str(tags))
    if not os.path.isfile(thumb_path):
        create_thumbnail(p, thumb_path)
    if format == 'dict':
        return tags
    if format == 'hydrus':
        return convert_raw_to_hydrus(tags)
    else:
        return pformat(tags['prediction'])


@click.command()
@click.option('--format', type=click.Choice(['raw', 'hydrus']), default='raw')
@click.option('-d', '--debug', is_flag=True, help="Enable debug.")
@click.option('-nc', '--no-clobber', is_flag=True,
              help="Skip download url when file exist.")
@click.option(
    '--close-delay', default=0, help="Close delay of the program.",
    callback=validate_close_delay)
@click.option(
    '--driver', default=None, help="Driver for browser (deprecated).",
    type=click.Choice(['firefox', 'phantomjs', 'chrome', 'zope.testbrowser', 'django']))
@click.option('--dump-html', is_flag=True,
              help="Dump html table for debugging (deprecated).")
@click.argument('path', nargs=-1)
def main(format, path, debug, no_clobber, close_delay, driver=None, dump_html=False):
    """get tag from illustration2vec."""
    if debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    structlog.configure_once(logger_factory=structlog.stdlib.LoggerFactory())
    log = structlog.getLogger()
    if not path:
        raise ValueError('PATH required.')
    # init folder
    os.makedirs(user_data_dir, exist_ok=True)
    os.makedirs(thumb_folder, exist_ok=True)
    # database
    db_path = os.path.join(user_data_dir, 'main.db')
    if not os.path.isfile(db_path):
        Path(db_path).touch()
    models.database.init(db_path)
    try:
        models.init_all_tables()
    except peewee.OperationalError:
        log.debug('Table already created')
    session = Session(driver=driver)
    try:
        for p in path:
            if os.path.isfile(p):
                print('path:{}'.format(os.path.basename(p)))
            elif is_url(p):
                print('url:{}'.format(p))
                p = download(p, no_clobber=no_clobber)
            else:
                log.error('Unknown path format or path is not exist', path=p)
                continue
            result = get_print_result(
                path=p, db_path=db_path, format=format, session=session)
            print(result)
    finally:
        delay_close(close_delay)
        if hasattr(session, 'browser'):
            session.browser.quit()


if __name__ == '__main__':
    main()
#!/usr/bin/env python3
"""get tag from http://demo.illustration2vec.net/."""
# note:
# - error 'ERROR: Request Entity Too Large' for file 1.1 mb
# <span style="color:red;">ERROR: Request Entity Too Large</span>
from collections import OrderedDict
from pathlib import Path
from pprint import pformat
import hashlib
import imghdr
import logging
import os
import shutil
import time
import urllib

import click
import peewee
import requests
import structlog
from PIL import Image

from i2vec_cli import models
from i2vec_cli.requests_session import Session, convert_raw_to_hydrus
from i2vec_cli.sha256 import sha256_checksum
from i2vec_cli.utils import user_data_dir, thumb_folder


def is_url(path):
    """Return True if path is url, False otherwise."""
    scheme = urllib.parse.urlparse(path).scheme
    return scheme in ('http', 'https')


def is_ext_equal(file_ext, imghdr_ext):
    """Compare a file extension with the type detected by imghdr.

    Args:
        file_ext: extension including the leading dot, e.g. ``'.jpg'``.
        imghdr_ext: type name from :func:`imghdr.what`, e.g. ``'jpeg'``.

    Returns:
        bool: True when the extension matches the detected type
        (treating ``.jpg``/``.jpeg`` as equal to ``jpeg``).
    """
    if not imghdr_ext:
        return False
    if file_ext.lower() == '.{}'.format(imghdr_ext):
        return True
    if file_ext.lower() in ('.jpg', '.jpeg') and imghdr_ext == 'jpeg':
        return True
    return False


def download(url, no_clobber):
    """Download url into the working directory.

    Args:
        url: URL to be downloaded.
        no_clobber: Skip download if file already exist.

    Returns:
        Downloaded filename (renamed to match the detected image type
        when the original extension disagrees with the file contents),
        or the existing file if `no_clobber` is `True`.
    """
    log = structlog.getLogger()
    basename = os.path.basename(url)
    if os.path.isfile(basename) and no_clobber:
        return basename
    response = requests.get(url, stream=True)
    with open(basename, 'wb') as out_file:
        shutil.copyfileobj(response.raw, out_file)
    name, ext = os.path.splitext(basename)
    imghdr_ext = imghdr.what(basename)
    ext_equal = is_ext_equal(file_ext=ext, imghdr_ext=imghdr_ext)
    if not imghdr_ext:
        # imghdr could not identify the file; keep the name as downloaded.
        log.debug("imghdr can't recognize file", file=basename)
        return basename
    if ext_equal:
        log.debug('Extension is equal', file_ext=ext, imghdr_ext=imghdr_ext)
        return basename
    # Extension disagrees with the detected type: rename to '<name>.<type>'.
    # (The original code had an unreachable third branch here; with
    # ext_equal being a bool, `if ext_equal` / `else` covers every case.)
    new_basename = '{}.{}'.format(name, imghdr_ext)
    new_basename_exist = os.path.isfile(new_basename)
    if new_basename_exist and not no_clobber:
        log.debug('Replace existing file', old=basename, new=new_basename)
        shutil.move(basename, new_basename)
    elif not new_basename_exist:
        log.debug('Rename file ext', file=basename, new_ext=imghdr_ext)
        shutil.move(basename, new_basename)
    else:
        log.debug('Not replace/rename file',
                  no_clobber=no_clobber, new_basename=new_basename)
    return new_basename


def validate_close_delay(ctx, param, value):
    """Click callback: validate --close-delay.

    Raises:
        click.BadParameter: when value is not an int or is below -1.
    """
    try:
        value = int(value)
    except Exception as e:
        raise click.BadParameter(
            'Error when validate close delay: value={}, error={}'.format(value, e))
    if value >= -1:
        return value
    raise click.BadParameter('Close delay have to be bigger or equal than -1')


def delay_close(close_delay):
    """Delay when closing the program.

    -1 waits for a keypress, 0 exits immediately, a positive value
    sleeps that many seconds.
    """
    log = structlog.getLogger()
    if close_delay == -1:
        click.pause()
    elif close_delay == 0:
        log.debug('No close delay')
    elif close_delay > 0:
        time.sleep(close_delay)
    else:
        log.error('Invalid close delay', v=close_delay)


def md5_checksum(fname):
    """Return the hex MD5 digest of file `fname`, read in 4 KiB chunks."""
    hash_md5 = hashlib.md5()
    with open(fname, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()


def create_thumbnail(path, thumb_path):
    """Create a 320x320-bounded JPEG thumbnail of `path` at `thumb_path`.

    Raises:
        IOError: when PIL cannot read the image or write the thumbnail.
    """
    size = 320, 320
    try:
        im = Image.open(path)
        im.thumbnail(size)
        im.save(thumb_path, "JPEG")
    except IOError:
        raise IOError("cannot create thumbnail for", path)


def get_print_result(path, db_path, format, session):
    """Get the tag prediction for `path`, formatted for printing.

    Results are cached in the database keyed on sha256/md5; a thumbnail
    is written under the user data dir as a side effect.
    """
    # BUGFIX: `log` was used below without ever being defined (NameError
    # on either exception path); define it like the other helpers do.
    log = structlog.getLogger()
    # compatibility
    p = path
    sha256 = sha256_checksum(p)
    md5 = md5_checksum(p)
    thumb_path = os.path.join(user_data_dir, 'thumb', '{}.jpg'.format(sha256))
    try:
        load_res = models.load_result(db=db_path, sha256=sha256, md5=md5)
    except models.Image.DoesNotExist:
        load_res = None
    if load_res:
        tags = {'prediction': load_res}
    else:
        tags = session.get_tags(path=p)
        try:
            models.save_result(
                db=db_path, sha256=sha256, md5=md5,
                prediction=tags['prediction'])
        except peewee.IntegrityError as e:
            log.debug(str(e))
        except KeyError:
            # BUGFIX: was `except keyError` (undefined name -> NameError).
            log.debug(str(tags))
    if not os.path.isfile(thumb_path):
        create_thumbnail(p, thumb_path)
    if format == 'dict':
        return tags
    if format == 'hydrus':
        return convert_raw_to_hydrus(tags)
    else:
        return pformat(tags['prediction'])


@click.command()
@click.option('--format', type=click.Choice(['raw', 'hydrus']), default='raw')
@click.option('-d', '--debug', is_flag=True, help="Enable debug.")
@click.option('-nc', '--no-clobber', is_flag=True,
              help="Skip download url when file exist.")
@click.option(
    '--close-delay', default=0, help="Close delay of the program.",
    callback=validate_close_delay)
@click.option(
    '--driver', default=None, help="Driver for browser (deprecated).",
    type=click.Choice(['firefox', 'phantomjs', 'chrome', 'zope.testbrowser', 'django']))
@click.option('--dump-html', is_flag=True,
              help="Dump html table for debugging (deprecated).")
@click.argument('path', nargs=-1)
def main(format, path, debug, no_clobber, close_delay, driver=None, dump_html=False):
    """get tag from illustration2vec."""
    if debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    structlog.configure_once(logger_factory=structlog.stdlib.LoggerFactory())
    log = structlog.getLogger()
    if not path:
        raise ValueError('PATH required.')
    # init folder
    os.makedirs(user_data_dir, exist_ok=True)
    os.makedirs(thumb_folder, exist_ok=True)
    # database
    db_path = os.path.join(user_data_dir, 'main.db')
    if not os.path.isfile(db_path):
        Path(db_path).touch()
    models.database.init(db_path)
    try:
        models.init_all_tables()
    except peewee.OperationalError:
        log.debug('Table already created')
    session = Session(driver=driver)
    try:
        for p in path:
            if os.path.isfile(p):
                print('path:{}'.format(os.path.basename(p)))
            elif is_url(p):
                print('url:{}'.format(p))
                p = download(p, no_clobber=no_clobber)
            else:
                log.error('Unknown path format or path is not exist', path=p)
                continue
            result = get_print_result(
                path=p, db_path=db_path, format=format, session=session)
            print(result)
    finally:
        delay_close(close_delay)
        if hasattr(session, 'browser'):
            session.browser.quit()


if __name__ == '__main__':
    main()
en
0.580715
#!/usr/bin/env python3 get tag from http://demo.illustration2vec.net/. # note: # - error 'ERROR: Request Entity Too Large' for file 1.1 mb # <span style="color:red;">ERROR: Request Entity Too Large</span> Return True if path is url, False otherwise. compare file extension with result from imghdr_ext. download url. Args: url: URL to be downloaded. no_clobber: Skip download if file already exist. Returns: Downloaded filename or existing file if `no_clobber` is `True` # just return base name if any error happen validate close delay. delay when closing the program. create thumbnail. get print result. # compatibility get tag from illustration2vec. # init folder # database
2.600625
3
cherrypy/lib/cptools.py
debrando/cherrypy
2
9884
"""Functions for builtin CherryPy tools.""" import logging import re from hashlib import md5 import six from six.moves import urllib import cherrypy from cherrypy._cpcompat import text_or_bytes from cherrypy.lib import httputil as _httputil from cherrypy.lib import is_iterator # Conditional HTTP request support # def validate_etags(autotags=False, debug=False): """Validate the current ETag against If-Match, If-None-Match headers. If autotags is True, an ETag response-header value will be provided from an MD5 hash of the response body (unless some other code has already provided an ETag header). If False (the default), the ETag will not be automatic. WARNING: the autotags feature is not designed for URL's which allow methods other than GET. For example, if a POST to the same URL returns no content, the automatic ETag will be incorrect, breaking a fundamental use for entity tags in a possibly destructive fashion. Likewise, if you raise 304 Not Modified, the response body will be empty, the ETag hash will be incorrect, and your application will break. See :rfc:`2616` Section 14.24. """ response = cherrypy.serving.response # Guard against being run twice. if hasattr(response, 'ETag'): return status, reason, msg = _httputil.valid_status(response.status) etag = response.headers.get('ETag') # Automatic ETag generation. See warning in docstring. if etag: if debug: cherrypy.log('ETag already set: %s' % etag, 'TOOLS.ETAGS') elif not autotags: if debug: cherrypy.log('Autotags off', 'TOOLS.ETAGS') elif status != 200: if debug: cherrypy.log('Status not 200', 'TOOLS.ETAGS') else: etag = response.collapse_body() etag = '"%s"' % md5(etag).hexdigest() if debug: cherrypy.log('Setting ETag: %s' % etag, 'TOOLS.ETAGS') response.headers['ETag'] = etag response.ETag = etag # "If the request would, without the If-Match header field, result in # anything other than a 2xx or 412 status, then the If-Match header # MUST be ignored." 
if debug: cherrypy.log('Status: %s' % status, 'TOOLS.ETAGS') if status >= 200 and status <= 299: request = cherrypy.serving.request conditions = request.headers.elements('If-Match') or [] conditions = [str(x) for x in conditions] if debug: cherrypy.log('If-Match conditions: %s' % repr(conditions), 'TOOLS.ETAGS') if conditions and not (conditions == ['*'] or etag in conditions): raise cherrypy.HTTPError(412, 'If-Match failed: ETag %r did ' 'not match %r' % (etag, conditions)) conditions = request.headers.elements('If-None-Match') or [] conditions = [str(x) for x in conditions] if debug: cherrypy.log('If-None-Match conditions: %s' % repr(conditions), 'TOOLS.ETAGS') if conditions == ['*'] or etag in conditions: if debug: cherrypy.log('request.method: %s' % request.method, 'TOOLS.ETAGS') if request.method in ('GET', 'HEAD'): raise cherrypy.HTTPRedirect([], 304) else: raise cherrypy.HTTPError(412, 'If-None-Match failed: ETag %r ' 'matched %r' % (etag, conditions)) def validate_since(): """Validate the current Last-Modified against If-Modified-Since headers. If no code has set the Last-Modified response header, then no validation will be performed. """ response = cherrypy.serving.response lastmod = response.headers.get('Last-Modified') if lastmod: status, reason, msg = _httputil.valid_status(response.status) request = cherrypy.serving.request since = request.headers.get('If-Unmodified-Since') if since and since != lastmod: if (status >= 200 and status <= 299) or status == 412: raise cherrypy.HTTPError(412) since = request.headers.get('If-Modified-Since') if since and since == lastmod: if (status >= 200 and status <= 299) or status == 304: if request.method in ('GET', 'HEAD'): raise cherrypy.HTTPRedirect([], 304) else: raise cherrypy.HTTPError(412) # Tool code # def allow(methods=None, debug=False): """Raise 405 if request.method not in methods (default ['GET', 'HEAD']). The given methods are case-insensitive, and may be in any order. 
If only one method is allowed, you may supply a single string; if more than one, supply a list of strings. Regardless of whether the current method is allowed or not, this also emits an 'Allow' response header, containing the given methods. """ if not isinstance(methods, (tuple, list)): methods = [methods] methods = [m.upper() for m in methods if m] if not methods: methods = ['GET', 'HEAD'] elif 'GET' in methods and 'HEAD' not in methods: methods.append('HEAD') cherrypy.response.headers['Allow'] = ', '.join(methods) if cherrypy.request.method not in methods: if debug: cherrypy.log('request.method %r not in methods %r' % (cherrypy.request.method, methods), 'TOOLS.ALLOW') raise cherrypy.HTTPError(405) else: if debug: cherrypy.log('request.method %r in methods %r' % (cherrypy.request.method, methods), 'TOOLS.ALLOW') def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For', scheme='X-Forwarded-Proto', debug=False): """Change the base URL (scheme://host[:port][/path]). For running a CP server behind Apache, lighttpd, or other HTTP server. For Apache and lighttpd, you should leave the 'local' argument at the default value of 'X-Forwarded-Host'. For Squid, you probably want to set tools.proxy.local = 'Origin'. If you want the new request.base to include path info (not just the host), you must explicitly set base to the full base path, and ALSO set 'local' to '', so that the X-Forwarded-Host request header (which never includes path info) does not override it. Regardless, the value for 'base' MUST NOT end in a slash. cherrypy.request.remote.ip (the IP address of the client) will be rewritten if the header specified by the 'remote' arg is valid. By default, 'remote' is set to 'X-Forwarded-For'. If you do not want to rewrite remote.ip, set the 'remote' arg to an empty string. 
""" request = cherrypy.serving.request if scheme: s = request.headers.get(scheme, None) if debug: cherrypy.log('Testing scheme %r:%r' % (scheme, s), 'TOOLS.PROXY') if s == 'on' and 'ssl' in scheme.lower(): # This handles e.g. webfaction's 'X-Forwarded-Ssl: on' header scheme = 'https' else: # This is for lighttpd/pound/Mongrel's 'X-Forwarded-Proto: https' scheme = s if not scheme: scheme = request.base[:request.base.find('://')] if local: lbase = request.headers.get(local, None) if debug: cherrypy.log('Testing local %r:%r' % (local, lbase), 'TOOLS.PROXY') if lbase is not None: base = lbase.split(',')[0] if not base: default = urllib.parse.urlparse(request.base).netloc base = request.headers.get('Host', default) if base.find('://') == -1: # add http:// or https:// if needed base = scheme + '://' + base request.base = base if remote: xff = request.headers.get(remote) if debug: cherrypy.log('Testing remote %r:%r' % (remote, xff), 'TOOLS.PROXY') if xff: if remote == 'X-Forwarded-For': # Grab the first IP in a comma-separated list. Ref #1268. xff = next(ip.strip() for ip in xff.split(',')) request.remote.ip = xff def ignore_headers(headers=('Range',), debug=False): """Delete request headers whose field names are included in 'headers'. This is a useful tool for working behind certain HTTP servers; for example, Apache duplicates the work that CP does for 'Range' headers, and will doubly-truncate the response. 
""" request = cherrypy.serving.request for name in headers: if name in request.headers: if debug: cherrypy.log('Ignoring request header %r' % name, 'TOOLS.IGNORE_HEADERS') del request.headers[name] def response_headers(headers=None, debug=False): """Set headers on the response.""" if debug: cherrypy.log('Setting response headers: %s' % repr(headers), 'TOOLS.RESPONSE_HEADERS') for name, value in (headers or []): cherrypy.serving.response.headers[name] = value response_headers.failsafe = True def referer(pattern, accept=True, accept_missing=False, error=403, message='Forbidden Referer header.', debug=False): """Raise HTTPError if Referer header does/does not match the given pattern. pattern A regular expression pattern to test against the Referer. accept If True, the Referer must match the pattern; if False, the Referer must NOT match the pattern. accept_missing If True, permit requests with no Referer header. error The HTTP error code to return to the client on failure. message A string to include in the response body on failure. 
""" try: ref = cherrypy.serving.request.headers['Referer'] match = bool(re.match(pattern, ref)) if debug: cherrypy.log('Referer %r matches %r' % (ref, pattern), 'TOOLS.REFERER') if accept == match: return except KeyError: if debug: cherrypy.log('No Referer header', 'TOOLS.REFERER') if accept_missing: return raise cherrypy.HTTPError(error, message) class SessionAuth(object): """Assert that the user is logged in.""" session_key = 'username' debug = False def check_username_and_password(self, username, password): pass def anonymous(self): """Provide a temporary user name for anonymous users.""" pass def on_login(self, username): pass def on_logout(self, username): pass def on_check(self, username): pass def login_screen(self, from_page='..', username='', error_msg='', **kwargs): return (six.text_type("""<html><body> Message: %(error_msg)s <form method="post" action="do_login"> Login: <input type="text" name="username" value="%(username)s" size="10" /> <br /> Password: <input type="password" name="password" size="10" /> <br /> <input type="hidden" name="from_page" value="%(from_page)s" /> <br /> <input type="submit" /> </form> </body></html>""") % vars()).encode('utf-8') def do_login(self, username, password, from_page='..', **kwargs): """Login. May raise redirect, or return True if request handled.""" response = cherrypy.serving.response error_msg = self.check_username_and_password(username, password) if error_msg: body = self.login_screen(from_page, username, error_msg) response.body = body if 'Content-Length' in response.headers: # Delete Content-Length header so finalize() recalcs it. del response.headers['Content-Length'] return True else: cherrypy.serving.request.login = username cherrypy.session[self.session_key] = username self.on_login(username) raise cherrypy.HTTPRedirect(from_page or '/') def do_logout(self, from_page='..', **kwargs): """Logout. 
May raise redirect, or return True if request handled.""" sess = cherrypy.session username = sess.get(self.session_key) sess[self.session_key] = None if username: cherrypy.serving.request.login = None self.on_logout(username) raise cherrypy.HTTPRedirect(from_page) def do_check(self): """Assert username. Raise redirect, or return True if request handled. """ sess = cherrypy.session request = cherrypy.serving.request response = cherrypy.serving.response username = sess.get(self.session_key) if not username: sess[self.session_key] = username = self.anonymous() self._debug_message('No session[username], trying anonymous') if not username: url = cherrypy.url(qs=request.query_string) self._debug_message( 'No username, routing to login_screen with from_page %(url)r', locals(), ) response.body = self.login_screen(url) if 'Content-Length' in response.headers: # Delete Content-Length header so finalize() recalcs it. del response.headers['Content-Length'] return True self._debug_message('Setting request.login to %(username)r', locals()) request.login = username self.on_check(username) def _debug_message(self, template, context={}): if not self.debug: return cherrypy.log(template % context, 'TOOLS.SESSAUTH') def run(self): request = cherrypy.serving.request response = cherrypy.serving.response path = request.path_info if path.endswith('login_screen'): self._debug_message('routing %(path)r to login_screen', locals()) response.body = self.login_screen() return True elif path.endswith('do_login'): if request.method != 'POST': response.headers['Allow'] = 'POST' self._debug_message('do_login requires POST') raise cherrypy.HTTPError(405) self._debug_message('routing %(path)r to do_login', locals()) return self.do_login(**request.params) elif path.endswith('do_logout'): if request.method != 'POST': response.headers['Allow'] = 'POST' raise cherrypy.HTTPError(405) self._debug_message('routing %(path)r to do_logout', locals()) return self.do_logout(**request.params) else: 
self._debug_message('No special path, running do_check') return self.do_check() def session_auth(**kwargs): sa = SessionAuth() for k, v in kwargs.items(): setattr(sa, k, v) return sa.run() session_auth.__doc__ = ( """Session authentication hook. Any attribute of the SessionAuth class may be overridden via a keyword arg to this function: """ + '\n'.join(['%s: %s' % (k, type(getattr(SessionAuth, k)).__name__) for k in dir(SessionAuth) if not k.startswith('__')]) ) def log_traceback(severity=logging.ERROR, debug=False): """Write the last error's traceback to the cherrypy error log.""" cherrypy.log('', 'HTTP', severity=severity, traceback=True) def log_request_headers(debug=False): """Write request headers to the cherrypy error log.""" h = [' %s: %s' % (k, v) for k, v in cherrypy.serving.request.header_list] cherrypy.log('\nRequest Headers:\n' + '\n'.join(h), 'HTTP') def log_hooks(debug=False): """Write request.hooks to the cherrypy error log.""" request = cherrypy.serving.request msg = [] # Sort by the standard points if possible. 
from cherrypy import _cprequest points = _cprequest.hookpoints for k in request.hooks.keys(): if k not in points: points.append(k) for k in points: msg.append(' %s:' % k) v = request.hooks.get(k, []) v.sort() for h in v: msg.append(' %r' % h) cherrypy.log('\nRequest Hooks for ' + cherrypy.url() + ':\n' + '\n'.join(msg), 'HTTP') def redirect(url='', internal=True, debug=False): """Raise InternalRedirect or HTTPRedirect to the given url.""" if debug: cherrypy.log('Redirecting %sto: %s' % ({True: 'internal ', False: ''}[internal], url), 'TOOLS.REDIRECT') if internal: raise cherrypy.InternalRedirect(url) else: raise cherrypy.HTTPRedirect(url) def trailing_slash(missing=True, extra=False, status=None, debug=False): """Redirect if path_info has (missing|extra) trailing slash.""" request = cherrypy.serving.request pi = request.path_info if debug: cherrypy.log('is_index: %r, missing: %r, extra: %r, path_info: %r' % (request.is_index, missing, extra, pi), 'TOOLS.TRAILING_SLASH') if request.is_index is True: if missing: if not pi.endswith('/'): new_url = cherrypy.url(pi + '/', request.query_string) raise cherrypy.HTTPRedirect(new_url, status=status or 301) elif request.is_index is False: if extra: # If pi == '/', don't redirect to ''! if pi.endswith('/') and pi != '/': new_url = cherrypy.url(pi[:-1], request.query_string) raise cherrypy.HTTPRedirect(new_url, status=status or 301) def flatten(debug=False): """Wrap response.body in a generator that recursively iterates over body. This allows cherrypy.response.body to consist of 'nested generators'; that is, a set of generators that yield generators. 
""" def flattener(input): numchunks = 0 for x in input: if not is_iterator(x): numchunks += 1 yield x else: for y in flattener(x): numchunks += 1 yield y if debug: cherrypy.log('Flattened %d chunks' % numchunks, 'TOOLS.FLATTEN') response = cherrypy.serving.response response.body = flattener(response.body) def accept(media=None, debug=False): """Return the client's preferred media-type (from the given Content-Types). If 'media' is None (the default), no test will be performed. If 'media' is provided, it should be the Content-Type value (as a string) or values (as a list or tuple of strings) which the current resource can emit. The client's acceptable media ranges (as declared in the Accept request header) will be matched in order to these Content-Type values; the first such string is returned. That is, the return value will always be one of the strings provided in the 'media' arg (or None if 'media' is None). If no match is found, then HTTPError 406 (Not Acceptable) is raised. Note that most web browsers send */* as a (low-quality) acceptable media range, which should match any Content-Type. In addition, "...if no Accept header field is present, then it is assumed that the client accepts all media types." Matching types are checked in order of client preference first, and then in the order of the given 'media' values. Note that this function does not honor accept-params (other than "q"). """ if not media: return if isinstance(media, text_or_bytes): media = [media] request = cherrypy.serving.request # Parse the Accept request header, and try to match one # of the requested media-ranges (in order of preference). ranges = request.headers.elements('Accept') if not ranges: # Any media type is acceptable. 
if debug: cherrypy.log('No Accept header elements', 'TOOLS.ACCEPT') return media[0] else: # Note that 'ranges' is sorted in order of preference for element in ranges: if element.qvalue > 0: if element.value == '*/*': # Matches any type or subtype if debug: cherrypy.log('Match due to */*', 'TOOLS.ACCEPT') return media[0] elif element.value.endswith('/*'): # Matches any subtype mtype = element.value[:-1] # Keep the slash for m in media: if m.startswith(mtype): if debug: cherrypy.log('Match due to %s' % element.value, 'TOOLS.ACCEPT') return m else: # Matches exact value if element.value in media: if debug: cherrypy.log('Match due to %s' % element.value, 'TOOLS.ACCEPT') return element.value # No suitable media-range found. ah = request.headers.get('Accept') if ah is None: msg = 'Your client did not send an Accept header.' else: msg = 'Your client sent this Accept header: %s.' % ah msg += (' But this resource only emits these media types: %s.' % ', '.join(media)) raise cherrypy.HTTPError(406, msg) class MonitoredHeaderMap(_httputil.HeaderMap): def transform_key(self, key): self.accessed_headers.add(key) return super(MonitoredHeaderMap, self).transform_key(key) def __init__(self): self.accessed_headers = set() super(MonitoredHeaderMap, self).__init__() def autovary(ignore=None, debug=False): """Auto-populate the Vary response header based on request.header access. 
""" request = cherrypy.serving.request req_h = request.headers request.headers = MonitoredHeaderMap() request.headers.update(req_h) if ignore is None: ignore = set(['Content-Disposition', 'Content-Length', 'Content-Type']) def set_response_header(): resp_h = cherrypy.serving.response.headers v = set([e.value for e in resp_h.elements('Vary')]) if debug: cherrypy.log( 'Accessed headers: %s' % request.headers.accessed_headers, 'TOOLS.AUTOVARY') v = v.union(request.headers.accessed_headers) v = v.difference(ignore) v = list(v) v.sort() resp_h['Vary'] = ', '.join(v) request.hooks.attach('before_finalize', set_response_header, 95) def convert_params(exception=ValueError, error=400): """Convert request params based on function annotations, with error handling. exception Exception class to catch. status The HTTP error code to return to the client on failure. """ request = cherrypy.serving.request types = request.handler.callable.__annotations__ with cherrypy.HTTPError.handle(exception, error): for key in set(types).intersection(request.params): request.params[key] = types[key](request.params[key])
"""Functions for builtin CherryPy tools.""" import logging import re from hashlib import md5 import six from six.moves import urllib import cherrypy from cherrypy._cpcompat import text_or_bytes from cherrypy.lib import httputil as _httputil from cherrypy.lib import is_iterator # Conditional HTTP request support # def validate_etags(autotags=False, debug=False): """Validate the current ETag against If-Match, If-None-Match headers. If autotags is True, an ETag response-header value will be provided from an MD5 hash of the response body (unless some other code has already provided an ETag header). If False (the default), the ETag will not be automatic. WARNING: the autotags feature is not designed for URL's which allow methods other than GET. For example, if a POST to the same URL returns no content, the automatic ETag will be incorrect, breaking a fundamental use for entity tags in a possibly destructive fashion. Likewise, if you raise 304 Not Modified, the response body will be empty, the ETag hash will be incorrect, and your application will break. See :rfc:`2616` Section 14.24. """ response = cherrypy.serving.response # Guard against being run twice. if hasattr(response, 'ETag'): return status, reason, msg = _httputil.valid_status(response.status) etag = response.headers.get('ETag') # Automatic ETag generation. See warning in docstring. if etag: if debug: cherrypy.log('ETag already set: %s' % etag, 'TOOLS.ETAGS') elif not autotags: if debug: cherrypy.log('Autotags off', 'TOOLS.ETAGS') elif status != 200: if debug: cherrypy.log('Status not 200', 'TOOLS.ETAGS') else: etag = response.collapse_body() etag = '"%s"' % md5(etag).hexdigest() if debug: cherrypy.log('Setting ETag: %s' % etag, 'TOOLS.ETAGS') response.headers['ETag'] = etag response.ETag = etag # "If the request would, without the If-Match header field, result in # anything other than a 2xx or 412 status, then the If-Match header # MUST be ignored." 
if debug: cherrypy.log('Status: %s' % status, 'TOOLS.ETAGS') if status >= 200 and status <= 299: request = cherrypy.serving.request conditions = request.headers.elements('If-Match') or [] conditions = [str(x) for x in conditions] if debug: cherrypy.log('If-Match conditions: %s' % repr(conditions), 'TOOLS.ETAGS') if conditions and not (conditions == ['*'] or etag in conditions): raise cherrypy.HTTPError(412, 'If-Match failed: ETag %r did ' 'not match %r' % (etag, conditions)) conditions = request.headers.elements('If-None-Match') or [] conditions = [str(x) for x in conditions] if debug: cherrypy.log('If-None-Match conditions: %s' % repr(conditions), 'TOOLS.ETAGS') if conditions == ['*'] or etag in conditions: if debug: cherrypy.log('request.method: %s' % request.method, 'TOOLS.ETAGS') if request.method in ('GET', 'HEAD'): raise cherrypy.HTTPRedirect([], 304) else: raise cherrypy.HTTPError(412, 'If-None-Match failed: ETag %r ' 'matched %r' % (etag, conditions)) def validate_since(): """Validate the current Last-Modified against If-Modified-Since headers. If no code has set the Last-Modified response header, then no validation will be performed. """ response = cherrypy.serving.response lastmod = response.headers.get('Last-Modified') if lastmod: status, reason, msg = _httputil.valid_status(response.status) request = cherrypy.serving.request since = request.headers.get('If-Unmodified-Since') if since and since != lastmod: if (status >= 200 and status <= 299) or status == 412: raise cherrypy.HTTPError(412) since = request.headers.get('If-Modified-Since') if since and since == lastmod: if (status >= 200 and status <= 299) or status == 304: if request.method in ('GET', 'HEAD'): raise cherrypy.HTTPRedirect([], 304) else: raise cherrypy.HTTPError(412) # Tool code # def allow(methods=None, debug=False): """Raise 405 if request.method not in methods (default ['GET', 'HEAD']). The given methods are case-insensitive, and may be in any order. 
If only one method is allowed, you may supply a single string; if more than one, supply a list of strings. Regardless of whether the current method is allowed or not, this also emits an 'Allow' response header, containing the given methods. """ if not isinstance(methods, (tuple, list)): methods = [methods] methods = [m.upper() for m in methods if m] if not methods: methods = ['GET', 'HEAD'] elif 'GET' in methods and 'HEAD' not in methods: methods.append('HEAD') cherrypy.response.headers['Allow'] = ', '.join(methods) if cherrypy.request.method not in methods: if debug: cherrypy.log('request.method %r not in methods %r' % (cherrypy.request.method, methods), 'TOOLS.ALLOW') raise cherrypy.HTTPError(405) else: if debug: cherrypy.log('request.method %r in methods %r' % (cherrypy.request.method, methods), 'TOOLS.ALLOW') def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For', scheme='X-Forwarded-Proto', debug=False): """Change the base URL (scheme://host[:port][/path]). For running a CP server behind Apache, lighttpd, or other HTTP server. For Apache and lighttpd, you should leave the 'local' argument at the default value of 'X-Forwarded-Host'. For Squid, you probably want to set tools.proxy.local = 'Origin'. If you want the new request.base to include path info (not just the host), you must explicitly set base to the full base path, and ALSO set 'local' to '', so that the X-Forwarded-Host request header (which never includes path info) does not override it. Regardless, the value for 'base' MUST NOT end in a slash. cherrypy.request.remote.ip (the IP address of the client) will be rewritten if the header specified by the 'remote' arg is valid. By default, 'remote' is set to 'X-Forwarded-For'. If you do not want to rewrite remote.ip, set the 'remote' arg to an empty string. 
""" request = cherrypy.serving.request if scheme: s = request.headers.get(scheme, None) if debug: cherrypy.log('Testing scheme %r:%r' % (scheme, s), 'TOOLS.PROXY') if s == 'on' and 'ssl' in scheme.lower(): # This handles e.g. webfaction's 'X-Forwarded-Ssl: on' header scheme = 'https' else: # This is for lighttpd/pound/Mongrel's 'X-Forwarded-Proto: https' scheme = s if not scheme: scheme = request.base[:request.base.find('://')] if local: lbase = request.headers.get(local, None) if debug: cherrypy.log('Testing local %r:%r' % (local, lbase), 'TOOLS.PROXY') if lbase is not None: base = lbase.split(',')[0] if not base: default = urllib.parse.urlparse(request.base).netloc base = request.headers.get('Host', default) if base.find('://') == -1: # add http:// or https:// if needed base = scheme + '://' + base request.base = base if remote: xff = request.headers.get(remote) if debug: cherrypy.log('Testing remote %r:%r' % (remote, xff), 'TOOLS.PROXY') if xff: if remote == 'X-Forwarded-For': # Grab the first IP in a comma-separated list. Ref #1268. xff = next(ip.strip() for ip in xff.split(',')) request.remote.ip = xff def ignore_headers(headers=('Range',), debug=False): """Delete request headers whose field names are included in 'headers'. This is a useful tool for working behind certain HTTP servers; for example, Apache duplicates the work that CP does for 'Range' headers, and will doubly-truncate the response. 
""" request = cherrypy.serving.request for name in headers: if name in request.headers: if debug: cherrypy.log('Ignoring request header %r' % name, 'TOOLS.IGNORE_HEADERS') del request.headers[name] def response_headers(headers=None, debug=False): """Set headers on the response.""" if debug: cherrypy.log('Setting response headers: %s' % repr(headers), 'TOOLS.RESPONSE_HEADERS') for name, value in (headers or []): cherrypy.serving.response.headers[name] = value response_headers.failsafe = True def referer(pattern, accept=True, accept_missing=False, error=403, message='Forbidden Referer header.', debug=False): """Raise HTTPError if Referer header does/does not match the given pattern. pattern A regular expression pattern to test against the Referer. accept If True, the Referer must match the pattern; if False, the Referer must NOT match the pattern. accept_missing If True, permit requests with no Referer header. error The HTTP error code to return to the client on failure. message A string to include in the response body on failure. 
""" try: ref = cherrypy.serving.request.headers['Referer'] match = bool(re.match(pattern, ref)) if debug: cherrypy.log('Referer %r matches %r' % (ref, pattern), 'TOOLS.REFERER') if accept == match: return except KeyError: if debug: cherrypy.log('No Referer header', 'TOOLS.REFERER') if accept_missing: return raise cherrypy.HTTPError(error, message) class SessionAuth(object): """Assert that the user is logged in.""" session_key = 'username' debug = False def check_username_and_password(self, username, password): pass def anonymous(self): """Provide a temporary user name for anonymous users.""" pass def on_login(self, username): pass def on_logout(self, username): pass def on_check(self, username): pass def login_screen(self, from_page='..', username='', error_msg='', **kwargs): return (six.text_type("""<html><body> Message: %(error_msg)s <form method="post" action="do_login"> Login: <input type="text" name="username" value="%(username)s" size="10" /> <br /> Password: <input type="password" name="password" size="10" /> <br /> <input type="hidden" name="from_page" value="%(from_page)s" /> <br /> <input type="submit" /> </form> </body></html>""") % vars()).encode('utf-8') def do_login(self, username, password, from_page='..', **kwargs): """Login. May raise redirect, or return True if request handled.""" response = cherrypy.serving.response error_msg = self.check_username_and_password(username, password) if error_msg: body = self.login_screen(from_page, username, error_msg) response.body = body if 'Content-Length' in response.headers: # Delete Content-Length header so finalize() recalcs it. del response.headers['Content-Length'] return True else: cherrypy.serving.request.login = username cherrypy.session[self.session_key] = username self.on_login(username) raise cherrypy.HTTPRedirect(from_page or '/') def do_logout(self, from_page='..', **kwargs): """Logout. 
May raise redirect, or return True if request handled.""" sess = cherrypy.session username = sess.get(self.session_key) sess[self.session_key] = None if username: cherrypy.serving.request.login = None self.on_logout(username) raise cherrypy.HTTPRedirect(from_page) def do_check(self): """Assert username. Raise redirect, or return True if request handled. """ sess = cherrypy.session request = cherrypy.serving.request response = cherrypy.serving.response username = sess.get(self.session_key) if not username: sess[self.session_key] = username = self.anonymous() self._debug_message('No session[username], trying anonymous') if not username: url = cherrypy.url(qs=request.query_string) self._debug_message( 'No username, routing to login_screen with from_page %(url)r', locals(), ) response.body = self.login_screen(url) if 'Content-Length' in response.headers: # Delete Content-Length header so finalize() recalcs it. del response.headers['Content-Length'] return True self._debug_message('Setting request.login to %(username)r', locals()) request.login = username self.on_check(username) def _debug_message(self, template, context={}): if not self.debug: return cherrypy.log(template % context, 'TOOLS.SESSAUTH') def run(self): request = cherrypy.serving.request response = cherrypy.serving.response path = request.path_info if path.endswith('login_screen'): self._debug_message('routing %(path)r to login_screen', locals()) response.body = self.login_screen() return True elif path.endswith('do_login'): if request.method != 'POST': response.headers['Allow'] = 'POST' self._debug_message('do_login requires POST') raise cherrypy.HTTPError(405) self._debug_message('routing %(path)r to do_login', locals()) return self.do_login(**request.params) elif path.endswith('do_logout'): if request.method != 'POST': response.headers['Allow'] = 'POST' raise cherrypy.HTTPError(405) self._debug_message('routing %(path)r to do_logout', locals()) return self.do_logout(**request.params) else: 
self._debug_message('No special path, running do_check') return self.do_check() def session_auth(**kwargs): sa = SessionAuth() for k, v in kwargs.items(): setattr(sa, k, v) return sa.run() session_auth.__doc__ = ( """Session authentication hook. Any attribute of the SessionAuth class may be overridden via a keyword arg to this function: """ + '\n'.join(['%s: %s' % (k, type(getattr(SessionAuth, k)).__name__) for k in dir(SessionAuth) if not k.startswith('__')]) ) def log_traceback(severity=logging.ERROR, debug=False): """Write the last error's traceback to the cherrypy error log.""" cherrypy.log('', 'HTTP', severity=severity, traceback=True) def log_request_headers(debug=False): """Write request headers to the cherrypy error log.""" h = [' %s: %s' % (k, v) for k, v in cherrypy.serving.request.header_list] cherrypy.log('\nRequest Headers:\n' + '\n'.join(h), 'HTTP') def log_hooks(debug=False): """Write request.hooks to the cherrypy error log.""" request = cherrypy.serving.request msg = [] # Sort by the standard points if possible. 
from cherrypy import _cprequest points = _cprequest.hookpoints for k in request.hooks.keys(): if k not in points: points.append(k) for k in points: msg.append(' %s:' % k) v = request.hooks.get(k, []) v.sort() for h in v: msg.append(' %r' % h) cherrypy.log('\nRequest Hooks for ' + cherrypy.url() + ':\n' + '\n'.join(msg), 'HTTP') def redirect(url='', internal=True, debug=False): """Raise InternalRedirect or HTTPRedirect to the given url.""" if debug: cherrypy.log('Redirecting %sto: %s' % ({True: 'internal ', False: ''}[internal], url), 'TOOLS.REDIRECT') if internal: raise cherrypy.InternalRedirect(url) else: raise cherrypy.HTTPRedirect(url) def trailing_slash(missing=True, extra=False, status=None, debug=False): """Redirect if path_info has (missing|extra) trailing slash.""" request = cherrypy.serving.request pi = request.path_info if debug: cherrypy.log('is_index: %r, missing: %r, extra: %r, path_info: %r' % (request.is_index, missing, extra, pi), 'TOOLS.TRAILING_SLASH') if request.is_index is True: if missing: if not pi.endswith('/'): new_url = cherrypy.url(pi + '/', request.query_string) raise cherrypy.HTTPRedirect(new_url, status=status or 301) elif request.is_index is False: if extra: # If pi == '/', don't redirect to ''! if pi.endswith('/') and pi != '/': new_url = cherrypy.url(pi[:-1], request.query_string) raise cherrypy.HTTPRedirect(new_url, status=status or 301) def flatten(debug=False): """Wrap response.body in a generator that recursively iterates over body. This allows cherrypy.response.body to consist of 'nested generators'; that is, a set of generators that yield generators. 
""" def flattener(input): numchunks = 0 for x in input: if not is_iterator(x): numchunks += 1 yield x else: for y in flattener(x): numchunks += 1 yield y if debug: cherrypy.log('Flattened %d chunks' % numchunks, 'TOOLS.FLATTEN') response = cherrypy.serving.response response.body = flattener(response.body) def accept(media=None, debug=False): """Return the client's preferred media-type (from the given Content-Types). If 'media' is None (the default), no test will be performed. If 'media' is provided, it should be the Content-Type value (as a string) or values (as a list or tuple of strings) which the current resource can emit. The client's acceptable media ranges (as declared in the Accept request header) will be matched in order to these Content-Type values; the first such string is returned. That is, the return value will always be one of the strings provided in the 'media' arg (or None if 'media' is None). If no match is found, then HTTPError 406 (Not Acceptable) is raised. Note that most web browsers send */* as a (low-quality) acceptable media range, which should match any Content-Type. In addition, "...if no Accept header field is present, then it is assumed that the client accepts all media types." Matching types are checked in order of client preference first, and then in the order of the given 'media' values. Note that this function does not honor accept-params (other than "q"). """ if not media: return if isinstance(media, text_or_bytes): media = [media] request = cherrypy.serving.request # Parse the Accept request header, and try to match one # of the requested media-ranges (in order of preference). ranges = request.headers.elements('Accept') if not ranges: # Any media type is acceptable. 
if debug: cherrypy.log('No Accept header elements', 'TOOLS.ACCEPT') return media[0] else: # Note that 'ranges' is sorted in order of preference for element in ranges: if element.qvalue > 0: if element.value == '*/*': # Matches any type or subtype if debug: cherrypy.log('Match due to */*', 'TOOLS.ACCEPT') return media[0] elif element.value.endswith('/*'): # Matches any subtype mtype = element.value[:-1] # Keep the slash for m in media: if m.startswith(mtype): if debug: cherrypy.log('Match due to %s' % element.value, 'TOOLS.ACCEPT') return m else: # Matches exact value if element.value in media: if debug: cherrypy.log('Match due to %s' % element.value, 'TOOLS.ACCEPT') return element.value # No suitable media-range found. ah = request.headers.get('Accept') if ah is None: msg = 'Your client did not send an Accept header.' else: msg = 'Your client sent this Accept header: %s.' % ah msg += (' But this resource only emits these media types: %s.' % ', '.join(media)) raise cherrypy.HTTPError(406, msg) class MonitoredHeaderMap(_httputil.HeaderMap): def transform_key(self, key): self.accessed_headers.add(key) return super(MonitoredHeaderMap, self).transform_key(key) def __init__(self): self.accessed_headers = set() super(MonitoredHeaderMap, self).__init__() def autovary(ignore=None, debug=False): """Auto-populate the Vary response header based on request.header access. 
""" request = cherrypy.serving.request req_h = request.headers request.headers = MonitoredHeaderMap() request.headers.update(req_h) if ignore is None: ignore = set(['Content-Disposition', 'Content-Length', 'Content-Type']) def set_response_header(): resp_h = cherrypy.serving.response.headers v = set([e.value for e in resp_h.elements('Vary')]) if debug: cherrypy.log( 'Accessed headers: %s' % request.headers.accessed_headers, 'TOOLS.AUTOVARY') v = v.union(request.headers.accessed_headers) v = v.difference(ignore) v = list(v) v.sort() resp_h['Vary'] = ', '.join(v) request.hooks.attach('before_finalize', set_response_header, 95) def convert_params(exception=ValueError, error=400): """Convert request params based on function annotations, with error handling. exception Exception class to catch. status The HTTP error code to return to the client on failure. """ request = cherrypy.serving.request types = request.handler.callable.__annotations__ with cherrypy.HTTPError.handle(exception, error): for key in set(types).intersection(request.params): request.params[key] = types[key](request.params[key])
en
0.775416
Functions for builtin CherryPy tools. # Conditional HTTP request support # Validate the current ETag against If-Match, If-None-Match headers. If autotags is True, an ETag response-header value will be provided from an MD5 hash of the response body (unless some other code has already provided an ETag header). If False (the default), the ETag will not be automatic. WARNING: the autotags feature is not designed for URL's which allow methods other than GET. For example, if a POST to the same URL returns no content, the automatic ETag will be incorrect, breaking a fundamental use for entity tags in a possibly destructive fashion. Likewise, if you raise 304 Not Modified, the response body will be empty, the ETag hash will be incorrect, and your application will break. See :rfc:`2616` Section 14.24. # Guard against being run twice. # Automatic ETag generation. See warning in docstring. # "If the request would, without the If-Match header field, result in # anything other than a 2xx or 412 status, then the If-Match header # MUST be ignored." Validate the current Last-Modified against If-Modified-Since headers. If no code has set the Last-Modified response header, then no validation will be performed. # Tool code # Raise 405 if request.method not in methods (default ['GET', 'HEAD']). The given methods are case-insensitive, and may be in any order. If only one method is allowed, you may supply a single string; if more than one, supply a list of strings. Regardless of whether the current method is allowed or not, this also emits an 'Allow' response header, containing the given methods. Change the base URL (scheme://host[:port][/path]). For running a CP server behind Apache, lighttpd, or other HTTP server. For Apache and lighttpd, you should leave the 'local' argument at the default value of 'X-Forwarded-Host'. For Squid, you probably want to set tools.proxy.local = 'Origin'. 
If you want the new request.base to include path info (not just the host), you must explicitly set base to the full base path, and ALSO set 'local' to '', so that the X-Forwarded-Host request header (which never includes path info) does not override it. Regardless, the value for 'base' MUST NOT end in a slash. cherrypy.request.remote.ip (the IP address of the client) will be rewritten if the header specified by the 'remote' arg is valid. By default, 'remote' is set to 'X-Forwarded-For'. If you do not want to rewrite remote.ip, set the 'remote' arg to an empty string. # This handles e.g. webfaction's 'X-Forwarded-Ssl: on' header # This is for lighttpd/pound/Mongrel's 'X-Forwarded-Proto: https' # add http:// or https:// if needed # Grab the first IP in a comma-separated list. Ref #1268. Delete request headers whose field names are included in 'headers'. This is a useful tool for working behind certain HTTP servers; for example, Apache duplicates the work that CP does for 'Range' headers, and will doubly-truncate the response. Set headers on the response. Raise HTTPError if Referer header does/does not match the given pattern. pattern A regular expression pattern to test against the Referer. accept If True, the Referer must match the pattern; if False, the Referer must NOT match the pattern. accept_missing If True, permit requests with no Referer header. error The HTTP error code to return to the client on failure. message A string to include in the response body on failure. Assert that the user is logged in. Provide a temporary user name for anonymous users. <html><body> Message: %(error_msg)s <form method="post" action="do_login"> Login: <input type="text" name="username" value="%(username)s" size="10" /> <br /> Password: <input type="password" name="password" size="10" /> <br /> <input type="hidden" name="from_page" value="%(from_page)s" /> <br /> <input type="submit" /> </form> </body></html> Login. May raise redirect, or return True if request handled. 
# Delete Content-Length header so finalize() recalcs it. Logout. May raise redirect, or return True if request handled. Assert username. Raise redirect, or return True if request handled. # Delete Content-Length header so finalize() recalcs it. Session authentication hook. Any attribute of the SessionAuth class may be overridden via a keyword arg to this function: Write the last error's traceback to the cherrypy error log. Write request headers to the cherrypy error log. Write request.hooks to the cherrypy error log. # Sort by the standard points if possible. Raise InternalRedirect or HTTPRedirect to the given url. Redirect if path_info has (missing|extra) trailing slash. # If pi == '/', don't redirect to ''! Wrap response.body in a generator that recursively iterates over body. This allows cherrypy.response.body to consist of 'nested generators'; that is, a set of generators that yield generators. Return the client's preferred media-type (from the given Content-Types). If 'media' is None (the default), no test will be performed. If 'media' is provided, it should be the Content-Type value (as a string) or values (as a list or tuple of strings) which the current resource can emit. The client's acceptable media ranges (as declared in the Accept request header) will be matched in order to these Content-Type values; the first such string is returned. That is, the return value will always be one of the strings provided in the 'media' arg (or None if 'media' is None). If no match is found, then HTTPError 406 (Not Acceptable) is raised. Note that most web browsers send */* as a (low-quality) acceptable media range, which should match any Content-Type. In addition, "...if no Accept header field is present, then it is assumed that the client accepts all media types." Matching types are checked in order of client preference first, and then in the order of the given 'media' values. Note that this function does not honor accept-params (other than "q"). 
# Parse the Accept request header, and try to match one # of the requested media-ranges (in order of preference). # Any media type is acceptable. # Note that 'ranges' is sorted in order of preference # Matches any type or subtype # Matches any subtype # Keep the slash # Matches exact value # No suitable media-range found. Auto-populate the Vary response header based on request.header access. Convert request params based on function annotations, with error handling. exception Exception class to catch. status The HTTP error code to return to the client on failure.
2.398516
2
pyiomica/utilityFunctions.py
benstear/pyiomica
0
9885
'''Utility functions''' import multiprocessing from .globalVariables import * def readMathIOmicaData(fileName): '''Read text files exported by MathIOmica and convert to Python data Parameters: fileName: str Path of directories and name of the file containing data Returns: data Python data Usage: data = readMathIOmicaData("../../MathIOmica/MathIOmica/MathIOmicaData/ExampleData/rnaExample") ''' if os.path.isfile(fileName): with open(fileName, 'r') as tempFile: data = tempFile.read() data = data.replace('\n','').replace('{','(').replace('}',')').replace('->',':').replace('|>','}') data = data.replace('<|','{').replace('^','*').replace('`','*').replace('Missing[]','"Missing[]"') data = data.replace("\\",'') else: print('File not found (%s)'%(fileName)) returning = None try: returning = eval(data) except: print('Error occured while converting data (%s)'%(fileName)) return returning def runCPUs(NumberOfAvailableCPUs, func, list_of_tuples_of_func_params): """Parallelize function call with multiprocessing.Pool. Parameters: NumberOfAvailableCPUs: int Number of processes to create func: function Function to apply, must take at most one argument list_of_tuples_of_func_params: list Function parameters Returns: 2d numpy.array Results of func in a numpy array Usage: results = runCPUs(4, pAutocorrelation, [(times[i], data[i], allTimes) for i in range(10)]) """ instPool = multiprocessing.Pool(processes = NumberOfAvailableCPUs) return_values = instPool.map(func, list_of_tuples_of_func_params) instPool.close() instPool.join() return np.vstack(return_values) def createReverseDictionary(inputDictionary): """Efficient way to create a reverse dictionary from a dictionary. Utilizes Pandas.Dataframe.groupby and Numpy arrays indexing. 
Parameters: inputDictionary: dictionary Dictionary to reverse Returns: dictionary Reversed dictionary Usage: revDict = createReverseDictionary(Dict) """ keys, values = np.array(list(inputDictionary.keys())), np.array(list(inputDictionary.values())) df = pd.DataFrame(np.array([[keys[i], value] for i in range(len(keys)) for value in values[i]])) dfGrouped = df.groupby(df.columns[1]) keys, values = list(dfGrouped.indices.keys()), list(dfGrouped.indices.values()) GOs = df.values.T[0] return dict(zip(keys, [GOs[value].tolist() for value in values])) def createDirectories(path): """Create a path of directories, unless the path already exists. Parameters: path: str Path directory Returns: None Usage: createDirectories("/pathToFolder1/pathToSubFolder2") """ if path=='': return None if not os.path.exists(path): os.makedirs(path) return None
'''Utility functions''' import multiprocessing from .globalVariables import * def readMathIOmicaData(fileName): '''Read text files exported by MathIOmica and convert to Python data Parameters: fileName: str Path of directories and name of the file containing data Returns: data Python data Usage: data = readMathIOmicaData("../../MathIOmica/MathIOmica/MathIOmicaData/ExampleData/rnaExample") ''' if os.path.isfile(fileName): with open(fileName, 'r') as tempFile: data = tempFile.read() data = data.replace('\n','').replace('{','(').replace('}',')').replace('->',':').replace('|>','}') data = data.replace('<|','{').replace('^','*').replace('`','*').replace('Missing[]','"Missing[]"') data = data.replace("\\",'') else: print('File not found (%s)'%(fileName)) returning = None try: returning = eval(data) except: print('Error occured while converting data (%s)'%(fileName)) return returning def runCPUs(NumberOfAvailableCPUs, func, list_of_tuples_of_func_params): """Parallelize function call with multiprocessing.Pool. Parameters: NumberOfAvailableCPUs: int Number of processes to create func: function Function to apply, must take at most one argument list_of_tuples_of_func_params: list Function parameters Returns: 2d numpy.array Results of func in a numpy array Usage: results = runCPUs(4, pAutocorrelation, [(times[i], data[i], allTimes) for i in range(10)]) """ instPool = multiprocessing.Pool(processes = NumberOfAvailableCPUs) return_values = instPool.map(func, list_of_tuples_of_func_params) instPool.close() instPool.join() return np.vstack(return_values) def createReverseDictionary(inputDictionary): """Efficient way to create a reverse dictionary from a dictionary. Utilizes Pandas.Dataframe.groupby and Numpy arrays indexing. 
Parameters: inputDictionary: dictionary Dictionary to reverse Returns: dictionary Reversed dictionary Usage: revDict = createReverseDictionary(Dict) """ keys, values = np.array(list(inputDictionary.keys())), np.array(list(inputDictionary.values())) df = pd.DataFrame(np.array([[keys[i], value] for i in range(len(keys)) for value in values[i]])) dfGrouped = df.groupby(df.columns[1]) keys, values = list(dfGrouped.indices.keys()), list(dfGrouped.indices.values()) GOs = df.values.T[0] return dict(zip(keys, [GOs[value].tolist() for value in values])) def createDirectories(path): """Create a path of directories, unless the path already exists. Parameters: path: str Path directory Returns: None Usage: createDirectories("/pathToFolder1/pathToSubFolder2") """ if path=='': return None if not os.path.exists(path): os.makedirs(path) return None
en
0.485753
Utility functions Read text files exported by MathIOmica and convert to Python data Parameters: fileName: str Path of directories and name of the file containing data Returns: data Python data Usage: data = readMathIOmicaData("../../MathIOmica/MathIOmica/MathIOmicaData/ExampleData/rnaExample") Parallelize function call with multiprocessing.Pool. Parameters: NumberOfAvailableCPUs: int Number of processes to create func: function Function to apply, must take at most one argument list_of_tuples_of_func_params: list Function parameters Returns: 2d numpy.array Results of func in a numpy array Usage: results = runCPUs(4, pAutocorrelation, [(times[i], data[i], allTimes) for i in range(10)]) Efficient way to create a reverse dictionary from a dictionary. Utilizes Pandas.Dataframe.groupby and Numpy arrays indexing. Parameters: inputDictionary: dictionary Dictionary to reverse Returns: dictionary Reversed dictionary Usage: revDict = createReverseDictionary(Dict) Create a path of directories, unless the path already exists. Parameters: path: str Path directory Returns: None Usage: createDirectories("/pathToFolder1/pathToSubFolder2")
2.947515
3
CRNitschke/get_sextract_thresholds.py
deapplegate/wtgpipeline
1
9886
<reponame>deapplegate/wtgpipeline #! /usr/bin/env python #adam-does# runs SeeingClearly to get the seeing and rms of the image, then uses those to get sextractor thresholds for CR detection #adam-use# use with CRNitschke pipeline #adam-call_example# call it like ./get_sextract_thresholds.py /path/flname.fits output_file.txt #IO stuff: import sys ; sys.path.append('/u/ki/awright/InstallingSoftware/pythons') ###saveout = sys.stdout saveout = sys.stdout ###logout = open('SeeingClearly_stdout.log','w') ###sys.stdout = logout saveerr = sys.stderr ###logerr = open('SeeingClearly_stderr.log','w') ###sys.stderr = logerr sys.stdout = sys.stderr #the basics import hashlib import os import SeeingClearly from copy import deepcopy def seeing_to_ft_dt(x): y1_dt,m_dt,x1_dt= 5900, -16551.7, 0.48 min_dt= 3500 max_dt= 6000 yy_dts=y1_dt+m_dt*(x-x1_dt) if yy_dts<min_dt:yy_dts=min_dt if yy_dts>max_dt:yy_dts=max_dt y1_ft,m_ft,x1_ft,min_ft= 850, -7000.0, 0.48, 450 min_ft= 450 max_ft= 1000 yy_fts=y1_ft+m_ft*(x-x1_ft) if yy_fts<min_ft:yy_fts=min_ft if yy_fts>max_ft:yy_fts=max_ft return yy_fts,yy_dts import imagetools import glob import astropy from astropy.io import ascii from numpy import asarray if __name__ == "__main__": args=deepcopy(sys.argv[1:]) for false_arg in ['-i', '--']: if false_arg in args: args.remove(false_arg) if len(args)<1: sys.exit() if not os.path.isfile(args[0]): print "sys.argv[1]=",args[0] raise Exception(args[0]+" is not a file!") else: fl=args[0] fl2save=args[1] #start tmp print "Using SeeingClearly to get seeing for: "+fl print "saving output to: " +fl2save try: FILTER=astropy.io.fits.open(fl)[0].header['FILTER'] except: FILTER="UnknownFilt" BASE,ending=os.path.basename(fl).split('OCF') ending="OCF"+ending ending=ending.replace('.fits','') fls_dir=os.path.dirname(fl) basename=os.path.basename(fl) CCDnum=imagetools.GetCCD(fl) globthis='_'+str(CCDnum) glob_basename=basename.replace(globthis,'_*') fls=sorted(glob.glob(fls_dir+"/"+glob_basename)) if not len(fls)==10: 
raise Exception('cannot find 10 files like this from different CCDs') #adam-old# seeing,back_rms=SeeingClearly.seeing_clearly_withplot(fls,checkplots=1,saveas='pltSeeingClearly_%s_%s' % (FILTER,BASE[:-1]+"ALL")) import adam_stars_from_cat import numpy seeing,back_rms=adam_stars_from_cat.get_seeing_backrms(fls) back_rms=numpy.array(back_rms) ft,dt=seeing_to_ft_dt(seeing) detect_thresh=dt/back_rms #convert to S2N ratio filter_thresh=ft/back_rms #convert to S2N ratio if FILTER=='W-J-B': detect_thresh=asarray([min(170.0,detect_thresh[i]) for i in range(len(detect_thresh))]) filter_thresh=asarray([min(20.0,filter_thresh[i]) for i in range(len(filter_thresh))]) elif (detect_thresh>170.0).any() or (filter_thresh>20.0).any(): print 'checkit: filter=%s and %.2f %% of the detection thresholds are above 170.0 and %.2f %% of the filter thresholds are above 20.0' % (FILTER,(detect_thresh>170.0).mean()*100, (filter_thresh>20.0).mean()*100) dict_out={} dict_out['seeing']=[seeing]*10 dict_out['rms']=back_rms dict_out['dt']=detect_thresh dict_out['ft']=filter_thresh dict_out['#files']=fls t=astropy.table.Table(data=dict_out,names=['#files','rms','seeing','dt','ft'],dtype=[str,float,float,float,float]) t.write(fl2save,format="ascii.basic") #adam-2014#detect_thresh_cap=min(detect_thresh,150.0) #cap is now set in the function seeing_to_ft_dt #PIXSCALE=float(os.environ['PIXSCALE']) #if seeing>PIXSCALE*2.5: #I have no check for being undersampled, should I? #if seeing>.4: # sys.stdout=saveout #back to printing to terminal # ###sys.stdout.write(str(seeing)) # print "'0 "+str(back_rms)+" "+str(seeing)+" "+str(detect_thresh)+" "+str(filter_thresh)+"'" # #else: # #print "exit 1;" # #raise Exception('Seeing less than 2.5xPIXSCALE. The image is undersampled') # #sys.stderr=saveerr #back to printing to terminal # #sys.stderr.write('1') # sys.stdout=saveout #back to printing to terminal # print "0 "+str(back_rms)+" "+str(seeing)+" "+str(detect_thresh)+" "+str(filter_thresh)
#! /usr/bin/env python #adam-does# runs SeeingClearly to get the seeing and rms of the image, then uses those to get sextractor thresholds for CR detection #adam-use# use with CRNitschke pipeline #adam-call_example# call it like ./get_sextract_thresholds.py /path/flname.fits output_file.txt #IO stuff: import sys ; sys.path.append('/u/ki/awright/InstallingSoftware/pythons') ###saveout = sys.stdout saveout = sys.stdout ###logout = open('SeeingClearly_stdout.log','w') ###sys.stdout = logout saveerr = sys.stderr ###logerr = open('SeeingClearly_stderr.log','w') ###sys.stderr = logerr sys.stdout = sys.stderr #the basics import hashlib import os import SeeingClearly from copy import deepcopy def seeing_to_ft_dt(x): y1_dt,m_dt,x1_dt= 5900, -16551.7, 0.48 min_dt= 3500 max_dt= 6000 yy_dts=y1_dt+m_dt*(x-x1_dt) if yy_dts<min_dt:yy_dts=min_dt if yy_dts>max_dt:yy_dts=max_dt y1_ft,m_ft,x1_ft,min_ft= 850, -7000.0, 0.48, 450 min_ft= 450 max_ft= 1000 yy_fts=y1_ft+m_ft*(x-x1_ft) if yy_fts<min_ft:yy_fts=min_ft if yy_fts>max_ft:yy_fts=max_ft return yy_fts,yy_dts import imagetools import glob import astropy from astropy.io import ascii from numpy import asarray if __name__ == "__main__": args=deepcopy(sys.argv[1:]) for false_arg in ['-i', '--']: if false_arg in args: args.remove(false_arg) if len(args)<1: sys.exit() if not os.path.isfile(args[0]): print "sys.argv[1]=",args[0] raise Exception(args[0]+" is not a file!") else: fl=args[0] fl2save=args[1] #start tmp print "Using SeeingClearly to get seeing for: "+fl print "saving output to: " +fl2save try: FILTER=astropy.io.fits.open(fl)[0].header['FILTER'] except: FILTER="UnknownFilt" BASE,ending=os.path.basename(fl).split('OCF') ending="OCF"+ending ending=ending.replace('.fits','') fls_dir=os.path.dirname(fl) basename=os.path.basename(fl) CCDnum=imagetools.GetCCD(fl) globthis='_'+str(CCDnum) glob_basename=basename.replace(globthis,'_*') fls=sorted(glob.glob(fls_dir+"/"+glob_basename)) if not len(fls)==10: raise Exception('cannot find 10 
files like this from different CCDs') #adam-old# seeing,back_rms=SeeingClearly.seeing_clearly_withplot(fls,checkplots=1,saveas='pltSeeingClearly_%s_%s' % (FILTER,BASE[:-1]+"ALL")) import adam_stars_from_cat import numpy seeing,back_rms=adam_stars_from_cat.get_seeing_backrms(fls) back_rms=numpy.array(back_rms) ft,dt=seeing_to_ft_dt(seeing) detect_thresh=dt/back_rms #convert to S2N ratio filter_thresh=ft/back_rms #convert to S2N ratio if FILTER=='W-J-B': detect_thresh=asarray([min(170.0,detect_thresh[i]) for i in range(len(detect_thresh))]) filter_thresh=asarray([min(20.0,filter_thresh[i]) for i in range(len(filter_thresh))]) elif (detect_thresh>170.0).any() or (filter_thresh>20.0).any(): print 'checkit: filter=%s and %.2f %% of the detection thresholds are above 170.0 and %.2f %% of the filter thresholds are above 20.0' % (FILTER,(detect_thresh>170.0).mean()*100, (filter_thresh>20.0).mean()*100) dict_out={} dict_out['seeing']=[seeing]*10 dict_out['rms']=back_rms dict_out['dt']=detect_thresh dict_out['ft']=filter_thresh dict_out['#files']=fls t=astropy.table.Table(data=dict_out,names=['#files','rms','seeing','dt','ft'],dtype=[str,float,float,float,float]) t.write(fl2save,format="ascii.basic") #adam-2014#detect_thresh_cap=min(detect_thresh,150.0) #cap is now set in the function seeing_to_ft_dt #PIXSCALE=float(os.environ['PIXSCALE']) #if seeing>PIXSCALE*2.5: #I have no check for being undersampled, should I? #if seeing>.4: # sys.stdout=saveout #back to printing to terminal # ###sys.stdout.write(str(seeing)) # print "'0 "+str(back_rms)+" "+str(seeing)+" "+str(detect_thresh)+" "+str(filter_thresh)+"'" # #else: # #print "exit 1;" # #raise Exception('Seeing less than 2.5xPIXSCALE. The image is undersampled') # #sys.stderr=saveerr #back to printing to terminal # #sys.stderr.write('1') # sys.stdout=saveout #back to printing to terminal # print "0 "+str(back_rms)+" "+str(seeing)+" "+str(detect_thresh)+" "+str(filter_thresh)
en
0.579151
#! /usr/bin/env python #adam-does# runs SeeingClearly to get the seeing and rms of the image, then uses those to get sextractor thresholds for CR detection #adam-use# use with CRNitschke pipeline #adam-call_example# call it like ./get_sextract_thresholds.py /path/flname.fits output_file.txt #IO stuff: ###saveout = sys.stdout ###logout = open('SeeingClearly_stdout.log','w') ###sys.stdout = logout ###logerr = open('SeeingClearly_stderr.log','w') ###sys.stderr = logerr #the basics #start tmp #adam-old# seeing,back_rms=SeeingClearly.seeing_clearly_withplot(fls,checkplots=1,saveas='pltSeeingClearly_%s_%s' % (FILTER,BASE[:-1]+"ALL")) #convert to S2N ratio #convert to S2N ratio #adam-2014#detect_thresh_cap=min(detect_thresh,150.0) #cap is now set in the function seeing_to_ft_dt #PIXSCALE=float(os.environ['PIXSCALE']) #if seeing>PIXSCALE*2.5: #I have no check for being undersampled, should I? #if seeing>.4: # sys.stdout=saveout #back to printing to terminal # ###sys.stdout.write(str(seeing)) # print "'0 "+str(back_rms)+" "+str(seeing)+" "+str(detect_thresh)+" "+str(filter_thresh)+"'" # #else: # #print "exit 1;" # #raise Exception('Seeing less than 2.5xPIXSCALE. The image is undersampled') # #sys.stderr=saveerr #back to printing to terminal # #sys.stderr.write('1') # sys.stdout=saveout #back to printing to terminal # print "0 "+str(back_rms)+" "+str(seeing)+" "+str(detect_thresh)+" "+str(filter_thresh)
2.27265
2
python/labbox/api/_session.py
flatironinstitute/labbox
1
9887
<filename>python/labbox/api/_session.py import time import multiprocessing class Session: def __init__(self, *, labbox_config, default_feed_name: str): self._labbox_config = labbox_config pipe_to_parent, pipe_to_child = multiprocessing.Pipe() self._worker_process = multiprocessing.Process(target=_run_worker_session, args=(pipe_to_parent, labbox_config, default_feed_name)) self._worker_process.start() self._pipe_to_worker_process = pipe_to_child self._incoming_keepalive_timestamp = time.time() def elapsed_sec_since_incoming_keepalive(self): return time.time() - self._incoming_keepalive_timestamp def cleanup(self): self._pipe_to_worker_process.send('exit') pass def check_for_outgoing_messages(self): ret = [] while self._pipe_to_worker_process.poll(): msg = self._pipe_to_worker_process.recv() if isinstance(msg, dict): if msg['type'] == 'outgoing_messages': ret.extend(msg['messages']) else: print(msg) raise Exception('Unexpected message from worker session') else: print(msg) raise Exception('Unexpected message from worker session') return ret def handle_message(self, msg): if msg['type'] == 'keepAlive': self._handle_keepalive() else: self._pipe_to_worker_process.send(dict( type='incoming_message', message=msg )) def _handle_keepalive(self): self._incoming_keepalive_timestamp = time.time() def _run_worker_session(pipe_to_parent, labbox_config, default_feed_name: str): from ._workersession import WorkerSession WS = WorkerSession(labbox_config=labbox_config, default_feed_name=default_feed_name) def handle_messages(msgs): pipe_to_parent.send(dict( type='outgoing_messages', messages=msgs )) WS.on_messages(handle_messages) WS.initialize() while True: while pipe_to_parent.poll(): x = pipe_to_parent.recv() if isinstance(x, str): if x == 'exit': WS.cleanup() return else: print(x) raise Exception('Unexpected message in _run_worker_session') elif isinstance(x, dict): if x['type'] == 'incoming_message': WS.handle_message(x['message']) else: print(x) raise Exception('Unexpected 
message in _run_worker_session') else: print(x) raise Exception('Unexpected message in _run_worker_session') WS.iterate() time.sleep(0.05)
<filename>python/labbox/api/_session.py import time import multiprocessing class Session: def __init__(self, *, labbox_config, default_feed_name: str): self._labbox_config = labbox_config pipe_to_parent, pipe_to_child = multiprocessing.Pipe() self._worker_process = multiprocessing.Process(target=_run_worker_session, args=(pipe_to_parent, labbox_config, default_feed_name)) self._worker_process.start() self._pipe_to_worker_process = pipe_to_child self._incoming_keepalive_timestamp = time.time() def elapsed_sec_since_incoming_keepalive(self): return time.time() - self._incoming_keepalive_timestamp def cleanup(self): self._pipe_to_worker_process.send('exit') pass def check_for_outgoing_messages(self): ret = [] while self._pipe_to_worker_process.poll(): msg = self._pipe_to_worker_process.recv() if isinstance(msg, dict): if msg['type'] == 'outgoing_messages': ret.extend(msg['messages']) else: print(msg) raise Exception('Unexpected message from worker session') else: print(msg) raise Exception('Unexpected message from worker session') return ret def handle_message(self, msg): if msg['type'] == 'keepAlive': self._handle_keepalive() else: self._pipe_to_worker_process.send(dict( type='incoming_message', message=msg )) def _handle_keepalive(self): self._incoming_keepalive_timestamp = time.time() def _run_worker_session(pipe_to_parent, labbox_config, default_feed_name: str): from ._workersession import WorkerSession WS = WorkerSession(labbox_config=labbox_config, default_feed_name=default_feed_name) def handle_messages(msgs): pipe_to_parent.send(dict( type='outgoing_messages', messages=msgs )) WS.on_messages(handle_messages) WS.initialize() while True: while pipe_to_parent.poll(): x = pipe_to_parent.recv() if isinstance(x, str): if x == 'exit': WS.cleanup() return else: print(x) raise Exception('Unexpected message in _run_worker_session') elif isinstance(x, dict): if x['type'] == 'incoming_message': WS.handle_message(x['message']) else: print(x) raise Exception('Unexpected 
message in _run_worker_session') else: print(x) raise Exception('Unexpected message in _run_worker_session') WS.iterate() time.sleep(0.05)
none
1
2.463666
2
aldryn_newsblog/tests/test_reversion.py
GabrielDumbrava/aldryn-newsblog
0
9888
# -*- coding: utf-8 -*- from __future__ import unicode_literals from unittest import skipIf try: from django.core.urlresolvers import reverse except ModuleNotFoundError: from django.urls import reverse from django.db import transaction from aldryn_reversion.core import create_revision as aldryn_create_revision from parler.utils.context import switch_language import six from . import NewsBlogTestCase from aldryn_newsblog.cms_appconfig import NewsBlogConfig from ..settings import ENABLE_REVERSION if ENABLE_REVERSION: try: from reversion import create_revision from reversion import default_revision_manager except ImportError: from reversion.revisions import create_revision from reversion.revisions import default_revision_manager @skipIf(not ENABLE_REVERSION, 'django-reversion not enabled') class TestVersioning(NewsBlogTestCase): def create_revision(self, article, content=None, language=None, **kwargs): with transaction.atomic(): with create_revision(): for k, v in six.iteritems(kwargs): setattr(article, k, v) if content: plugins = article.content.get_plugins() plugin = plugins[0].get_plugin_instance()[0] plugin.body = content plugin.save() # TODO: Cover both cases (plugin modification/recreation) # if content: # article.content.get_plugins().delete() # api.add_plugin(article.content, 'TextPlugin', # self.language, body=content) article.save() def revert_to(self, article, revision): (default_revision_manager.get_for_object(article)[revision] .revision.revert()) def test_revert_revision(self): title1 = self.rand_str(prefix='title1_') title2 = self.rand_str(prefix='title2_') content0 = self.rand_str(prefix='content0_') content1 = self.rand_str(prefix='content1_') content2 = self.rand_str(prefix='content2_') article = self.create_article(content=content0) # Revision 1 self.create_revision(article, title=title1, content=content1) response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1) self.assertContains(response, content1) 
self.assertNotContains(response, content0) # Revision 2 self.create_revision(article, title=title2, content=content2) response = self.client.get(article.get_absolute_url()) self.assertContains(response, title2) self.assertContains(response, content2) self.assertNotContains(response, content1) # Revert to revision 1 self.revert_to(article, 1) response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1) self.assertContains(response, content1) self.assertNotContains(response, content0) self.assertNotContains(response, content2) def test_revert_translated_revision(self): title1_en = self.rand_str(prefix='title1_en_') title1_de = self.rand_str(prefix='title1_de_') title2_en = self.rand_str(prefix='title2_en_') title2_de = self.rand_str(prefix='title2_de_') article = self.create_article() # Revision 1 article.set_current_language('en') self.create_revision(article, title=title1_en) article.set_current_language('de') self.create_revision(article, title=title1_de) with switch_language(article, 'en'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_en) with switch_language(article, 'de'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_de) # Revision 2a (modify just EN) article.set_current_language('en') self.create_revision(article, title=title2_en) response = self.client.get(article.get_absolute_url()) self.assertContains(response, title2_en) with switch_language(article, 'de'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_de) # Revision 2b (modify just DE) article.set_current_language('de') self.create_revision(article, title=title2_de) with switch_language(article, 'en'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title2_en) with switch_language(article, 'de'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title2_de) # Revert 
to revision 2a (EN=2, DE=1) self.revert_to(article, 1) with switch_language(article, 'en'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title2_en) with switch_language(article, 'de'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_de) # Revert to revision 1 (EN=1, DE=1) self.revert_to(article, 2) with switch_language(article, 'en'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_en) with switch_language(article, 'de'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_de) def test_edit_plugin_directly(self): content0 = self.rand_str(prefix='content0_') content1 = self.rand_str(prefix='content1_') content2 = self.rand_str(prefix='content2_') article = self.create_article(content=content0) # Revision 1 self.create_revision(article, content=content1) self.assertEqual( len(default_revision_manager.get_for_object(article)), 1) # Revision 2 with transaction.atomic(): plugins = article.content.get_plugins() plugin = plugins[0].get_plugin_instance()[0] plugin.body = content2 plugin.save() aldryn_create_revision(article) self.assertEqual( len(default_revision_manager.get_for_object(article)), 2) response = self.client.get(article.get_absolute_url()) self.assertContains(response, content2) self.assertNotContains(response, content1) # Revert to revision 1 self.revert_to(article, 1) response = self.client.get(article.get_absolute_url()) self.assertContains(response, content1) self.assertNotContains(response, content2) def test_blog_config_recovery_accessible(self): with transaction.atomic(): with create_revision(): new_conf = NewsBlogConfig( namespace='test_revocery_admin_url', paginate_by=15) new_conf.save() new_config_version = (default_revision_manager .get_for_object(new_conf)[0]) new_config_pk = new_conf.pk self.assertEqual(NewsBlogConfig.objects.filter( pk=new_config_pk).count(), 1) new_conf.delete() 
self.assertEqual(NewsBlogConfig.objects.filter( pk=new_config_pk).count(), 0) # check that there is a a way to access recovery view obj = new_config_version.object_version.object opts = obj._meta url = reverse( 'admin:{0}_{1}_{2}'.format( opts.app_label, obj._meta.model_name, 'recover'), args=[new_config_version.pk]) # ust in case check the length, but at this step either a # NoReverseMatch should occur or other error, # if no exception is raised, it is a good sign self.assertGreater(len(url), 4)
# -*- coding: utf-8 -*- from __future__ import unicode_literals from unittest import skipIf try: from django.core.urlresolvers import reverse except ModuleNotFoundError: from django.urls import reverse from django.db import transaction from aldryn_reversion.core import create_revision as aldryn_create_revision from parler.utils.context import switch_language import six from . import NewsBlogTestCase from aldryn_newsblog.cms_appconfig import NewsBlogConfig from ..settings import ENABLE_REVERSION if ENABLE_REVERSION: try: from reversion import create_revision from reversion import default_revision_manager except ImportError: from reversion.revisions import create_revision from reversion.revisions import default_revision_manager @skipIf(not ENABLE_REVERSION, 'django-reversion not enabled') class TestVersioning(NewsBlogTestCase): def create_revision(self, article, content=None, language=None, **kwargs): with transaction.atomic(): with create_revision(): for k, v in six.iteritems(kwargs): setattr(article, k, v) if content: plugins = article.content.get_plugins() plugin = plugins[0].get_plugin_instance()[0] plugin.body = content plugin.save() # TODO: Cover both cases (plugin modification/recreation) # if content: # article.content.get_plugins().delete() # api.add_plugin(article.content, 'TextPlugin', # self.language, body=content) article.save() def revert_to(self, article, revision): (default_revision_manager.get_for_object(article)[revision] .revision.revert()) def test_revert_revision(self): title1 = self.rand_str(prefix='title1_') title2 = self.rand_str(prefix='title2_') content0 = self.rand_str(prefix='content0_') content1 = self.rand_str(prefix='content1_') content2 = self.rand_str(prefix='content2_') article = self.create_article(content=content0) # Revision 1 self.create_revision(article, title=title1, content=content1) response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1) self.assertContains(response, content1) 
self.assertNotContains(response, content0) # Revision 2 self.create_revision(article, title=title2, content=content2) response = self.client.get(article.get_absolute_url()) self.assertContains(response, title2) self.assertContains(response, content2) self.assertNotContains(response, content1) # Revert to revision 1 self.revert_to(article, 1) response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1) self.assertContains(response, content1) self.assertNotContains(response, content0) self.assertNotContains(response, content2) def test_revert_translated_revision(self): title1_en = self.rand_str(prefix='title1_en_') title1_de = self.rand_str(prefix='title1_de_') title2_en = self.rand_str(prefix='title2_en_') title2_de = self.rand_str(prefix='title2_de_') article = self.create_article() # Revision 1 article.set_current_language('en') self.create_revision(article, title=title1_en) article.set_current_language('de') self.create_revision(article, title=title1_de) with switch_language(article, 'en'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_en) with switch_language(article, 'de'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_de) # Revision 2a (modify just EN) article.set_current_language('en') self.create_revision(article, title=title2_en) response = self.client.get(article.get_absolute_url()) self.assertContains(response, title2_en) with switch_language(article, 'de'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_de) # Revision 2b (modify just DE) article.set_current_language('de') self.create_revision(article, title=title2_de) with switch_language(article, 'en'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title2_en) with switch_language(article, 'de'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title2_de) # Revert 
to revision 2a (EN=2, DE=1) self.revert_to(article, 1) with switch_language(article, 'en'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title2_en) with switch_language(article, 'de'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_de) # Revert to revision 1 (EN=1, DE=1) self.revert_to(article, 2) with switch_language(article, 'en'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_en) with switch_language(article, 'de'): response = self.client.get(article.get_absolute_url()) self.assertContains(response, title1_de) def test_edit_plugin_directly(self): content0 = self.rand_str(prefix='content0_') content1 = self.rand_str(prefix='content1_') content2 = self.rand_str(prefix='content2_') article = self.create_article(content=content0) # Revision 1 self.create_revision(article, content=content1) self.assertEqual( len(default_revision_manager.get_for_object(article)), 1) # Revision 2 with transaction.atomic(): plugins = article.content.get_plugins() plugin = plugins[0].get_plugin_instance()[0] plugin.body = content2 plugin.save() aldryn_create_revision(article) self.assertEqual( len(default_revision_manager.get_for_object(article)), 2) response = self.client.get(article.get_absolute_url()) self.assertContains(response, content2) self.assertNotContains(response, content1) # Revert to revision 1 self.revert_to(article, 1) response = self.client.get(article.get_absolute_url()) self.assertContains(response, content1) self.assertNotContains(response, content2) def test_blog_config_recovery_accessible(self): with transaction.atomic(): with create_revision(): new_conf = NewsBlogConfig( namespace='test_revocery_admin_url', paginate_by=15) new_conf.save() new_config_version = (default_revision_manager .get_for_object(new_conf)[0]) new_config_pk = new_conf.pk self.assertEqual(NewsBlogConfig.objects.filter( pk=new_config_pk).count(), 1) new_conf.delete() 
self.assertEqual(NewsBlogConfig.objects.filter( pk=new_config_pk).count(), 0) # check that there is a a way to access recovery view obj = new_config_version.object_version.object opts = obj._meta url = reverse( 'admin:{0}_{1}_{2}'.format( opts.app_label, obj._meta.model_name, 'recover'), args=[new_config_version.pk]) # ust in case check the length, but at this step either a # NoReverseMatch should occur or other error, # if no exception is raised, it is a good sign self.assertGreater(len(url), 4)
en
0.664263
# -*- coding: utf-8 -*- # TODO: Cover both cases (plugin modification/recreation) # if content: # article.content.get_plugins().delete() # api.add_plugin(article.content, 'TextPlugin', # self.language, body=content) # Revision 1 # Revision 2 # Revert to revision 1 # Revision 1 # Revision 2a (modify just EN) # Revision 2b (modify just DE) # Revert to revision 2a (EN=2, DE=1) # Revert to revision 1 (EN=1, DE=1) # Revision 1 # Revision 2 # Revert to revision 1 # check that there is a a way to access recovery view # ust in case check the length, but at this step either a # NoReverseMatch should occur or other error, # if no exception is raised, it is a good sign
2.237348
2
network/network.py
VirtualEmbryo/lumen_network
1
9889
# Library for the dynamics of a lumen network # The lumen are 2 dimensional and symmetric and connected with 1 dimensional tubes # # Created by <NAME>, 2018 # Modified by <NAME>--Serandour on 8/04/2019 """ network.py conf.init Defines the class network and associated functions Imports ------- Libraries : numpy, os, math Created by <NAME> Modified by <NAME> on 8/06/2018 Modified by <NAME>--Serandour on 8/04/2019 """ import numpy as np import math import os class network: def __init__(self, network_folder, out_path, t_step, tube_radius = 0.01, friction = 1, swelling = False, swelling_rate=0., save_area_dat=False): """ Initialization of the object network All properties needed for the simulation are read and initialized Input ----- network_folder : str out_path : str, path-like t_step : float Time step of the simulation. Note that if the simulation is adaptative, this time step will change. tube_radius : float, optional, default = 0.01 Radius of the tube connecting lumens. Define the condition for empty lumens. friction : float, optional, default = 1 Friction constant for the fluid circulating through pipes. swelling : bool, optional, default = False Swelling option for the simulation. True if swelling is included, False otherwise. swelling_rate : float, optional, default = 0. Swelling rate value in case the swelling is considered. Make sure the rate is not to big to avoid non-converging simulations. save_area_dat : bool, optional, default = False Save area option. True if areas are saved in area.dat, False otherwise. 
""" self.network_folder = network_folder # Reading properties of the lumen self.gamma_lumen, self.gamma_contact, self.area = np.loadtxt(os.path.join(network_folder, 'lumen.dat'), dtype = float, usecols = [0,2,3], unpack = True) # Reading links between two lumen self.lumen_lumen = self.read_lumen_lumen(os.path.join(network_folder, 'lumen_lumen.dat')) # Reading links between bridge and lumen self.bridge_lumen, self.num_bridges = self.read_bridge_lumen(os.path.join(network_folder, 'bridge_lumen.dat')) # Reading links between two bridges self.bridge_bridge, self.num_bridges = self.read_bridge_bridge(os.path.join(network_folder, 'bridge_bridge.dat'), self.num_bridges) # Surface tension ratio self.alpha = self.gamma_contact/(2*self.gamma_lumen) self.delta = np.full(len(self.alpha), 1) # Possibility of asymmetric lumen is not included # Resistances self.tube_radius = tube_radius # Radius of the tube connecting the lumen and the bridges self.friction = friction # Friction coefficient; friction * length = resistance # Opening angle of the lumen (angle between curvature and tube) self.theta = self.set_theta() # Area factor for expressing the pressure in terms of the area instead of the radius self.area_factor = self.set_area_factor() # Ending time: time at which only one lumen is remaining self.end_time = 0 # Time step for the output of the area evolution self.time_step = t_step # Creating output file for the area evolution, events, error messages self.save_area(start = True, out_path = out_path) self.save_event('', start = True, out_path = out_path) self.save_error('', start = True, out_path = out_path) # Area distribution after only one lumen is remaining self.final_area = [] # Current time step of the simulation self.current_time = 0 # List of empty lumen (area < tube_radius **2) self.empty_list = np.zeros(len(self.alpha)) # Swelling self.swelling_bool = swelling self.swelling_rate = swelling_rate # Save area self.save_area_dat = save_area_dat 
############################################################################################################################ ########################################################## Dynamics ######################################################## ############################################################################################################################ def flux(self, t, state): """ Determines the flux/ area change for each lumen of the network, main function of network.py Input ----- self : network object Needs to be called by a class object t : float Actual time step (not needed for the calculation of the flux, but required for the used integration method in network_simulation.py state : float array The current area of the lumens Returns ------- flux : float array Contains the area change for each lumen in dt """ # Initialization of the array containing the area change (index == lumen ID) flux = [] self.current_time = t for i in range(len(self.alpha)): flux.append(0) # If only one lumen remains -> End of simulation, flux is zero (needed as for the integration method used, no dynamic stop is possible) if(np.sum(self.empty_list) >= len(self.alpha) - 1): if(self.end_time == 0): # Setting the end time for the output file area.log self.end_time = t # more than one lumen remaining: calculation of the flux else: # Adapting network to new state: Empty lumen are removed and graph is reconnected self.area = state self.remove_empty_lumen() # Area change between directly connected lumen flux = self.flux_lumen(flux) # Calculating artificial pressure at each bridge; linear system of equations, with flux(bridge) = 0, the bridge does not gain or loose area pressure_bridges = self.pressure_bridges() # Area change between lumen-bridges flux = self.flux_bridges(flux, pressure_bridges) # Area change due to swelling if self.swelling_bool : flux = self.flux_swelling(flux) # Saving area for the time step given in the configuration file if self.save_area_dat : 
self.save_area() self.t_old = t if(np.abs(np.sum(flux)) > self.tube_radius ** 2): error = 'total flux is non-zero: total flux = %f' % (np.sum(flux)) self.save_error(error) return flux def flux_lumen(self,flux): """ Determines the flux/ area change for each lumen due to the connection between lumen and lumen Input ----- self network object needs to be called by a class object flux float array vector containing the area change for each lumen; index = lumen ID Returns ------- flux float array area changes due to lumen-lumen connection added to the vector passed """ # for each connection between two lumen for line in range(len(self.lumen_lumen)): lumen_1 = int (self.lumen_lumen[line][0]) # first lumen lumen_2 = int (self.lumen_lumen[line][1]) # second lumen # flux from lumen 2 to lumen 1 fl = (self.pressure(lumen_2) - self.pressure(lumen_1))*self.friction/self.lumen_lumen[line][2] flux[lumen_1] += fl flux[lumen_2] -= fl return flux def pressure_bridges(self): """ Determines the pressure at each bridge for each bridge the total flux is 0, meaning that the bridge does not gain or loose area this gives a linear equation system, which can be solved The connections are taken from the files bridge_lumen.dat and bridge_bridge.dat For Information about the equations see the documentation to the code Input ----- self : network object Needs to be called by a class object Returns ------- pressure_bridges : float array Pressure at each bridge """ R_sum = np.zeros(self.num_bridges, dtype = float) # sum of the resistences around one bridge P_over_R_sum = np.zeros(self.num_bridges, dtype = float) # sum of pressure over resistance between one bridge and all directly connected lumen matrix_bridges = np.zeros([self.num_bridges, self.num_bridges], dtype= float) # matrix to calculate the pressure at each bridge # For each connection between bridge and lumen for line in self.bridge_lumen: bridge = int(line[0]) lumen = int(line[1]) R_sum[bridge] += 1./line[2]*self.friction 
P_over_R_sum[bridge] += self.pressure(lumen)/line[2]*self.friction # For each connection between bridge and bridge for line in self.bridge_bridge: bridge1 = int(line[0]) bridge2 = int(line[1]) matrix_bridges[bridge1][bridge2] = 1./line[2]*self.friction matrix_bridges[bridge2][bridge1] = 1./line[2]*self.friction R_sum[bridge1] += 1./line[2]*self.friction R_sum[bridge2] += 1./line[2]*self.friction for line in range(self.num_bridges): matrix_bridges[line][line] = -R_sum[line] # Solving linear problem with the pressure at each bridge as solution pressure_bridges = np.linalg.solve(matrix_bridges, -P_over_R_sum) return pressure_bridges; def flux_bridges(self, flux, pressure_bridges): """ Determines the flux/ area change for each lumen due to the connection between lumen and bridge Input ----- self : network object Needs to be called by a class object Returns ------- flux : float array Area changes due to bridge-lumen connection added to the vector passed """ # Area change in one bridge; should be 0; calculated as control value flux_bridge = np.zeros(self.num_bridges, dtype = float) # For each connection between bridge and bridge for line in self.bridge_bridge: bridge1 = int(line[0]) bridge2 = int(line[1]) fb = (pressure_bridges[bridge2] - pressure_bridges[bridge1])*self.friction/line[2] flux_bridge[bridge1] += fb flux_bridge[bridge2] -= fb # For each connection between bridge and lumen for line in self.bridge_lumen: bridge = int(line[0]) lumen = int(line[1]) fl = (pressure_bridges[bridge] - self.pressure(lumen))*self.friction/line[2] flux[lumen] += fl flux_bridge[bridge] -= fl for i in range(len(flux_bridge)): if (np.abs(flux_bridge[i]) > self.tube_radius ** 2): error = 'total flux of bridge %d is non-zero: total flux = %f' % (i,flux_bridge[i]) self.save_error(error) return flux def flux_swelling(self, flux) : """ Determines the flux/ area change for each lumen due to sewlling Input ----- self : network object Needs to be called by a class object Returns ------- flux : 
float array Area changes due to bridge-lumen connection added to the vector passed """ # for each lumen (lumen is the index of the lumen's area) for lumen in range(len(self.area)) : # if not empty if not self.area[lumen] < 2*self.tube_radius ** 2 : # then add the swelling contribution flux[lumen] += self.swelling(lumen) return flux ############################################################################################################################ ###################################################### Removing Functions ##################################################### ############################################################################################################################ def remove_empty_lumen(self): """ Determines and removes empty lumen Calls a function to obtain a list of empty lumen and passes the list to a function to remove them and reconnect the network Input ----- self : network object Needs to be called by a class object Returns ------- no return """ empty_lumen_list = [] # Creating a list of empty lumen empty_lumen_list = self.get_empty_lumen() # Removing empty lumen and reconnecting the network if (len(empty_lumen_list) > 0 ): event = 'empty lumen: ' + ' '.join(map(str, empty_lumen_list)) #print event self.save_event(event) self.remove_lumen(empty_lumen_list) return; def remove_lumen(self, lumen_to_remove): """ Removes the lumen that are passed and connects the neighbors of these lumen Input ----- self : network object Needs to be called by a class object lumen_to_remove : int list List of lumen to be removed Returns ------- no return """ # For each lumen that has to be removed for lumen in lumen_to_remove: neighbours = self.get_neighbours(lumen) # List of connected lumen bridges = self.get_bridges(lumen) # List of connected bridges self.save_event('lumen ' + str(lumen) + ' neighbours ' + str(neighbours)) self.save_event('lumen ' + str(lumen) + ' bridges ' + str(bridges)) # Lumen had two connections, this means that it 
disappears and the two connected parts get directly connected, the resistance for the new link is the sum of the resistance of the two previous connections test=True if(len(neighbours) + len(bridges) == 2): # Lumen was connected to two lumen -> new connection between lumen and lumen if(len(neighbours) == 2): self.create_link([neighbours[0][0], neighbours[1][0], neighbours[0][1] + neighbours[1][1]]) #print 'lumen_lumen connexion (' + str(neighbours[0][0]) + ', ' + str(neighbours[1][0]) + ')' # Lumen was connected to a lumen and a bridge -> new connection between lumen and bridge if(len(neighbours) == 1 and len(bridges)==1): self.create_bridge_lumen([bridges[0][0], neighbours[0][0], bridges[0][1] + neighbours[0][1]]) #print 'lumen_bridge connexion (' + str(bridges[0][0]) + ', ' + str(neighbours[0][0]) + ')' # Lumen was connected to two bridges -> new connection between bridge and bridge if(len(bridges)==2): self.create_bridge_bridge([bridges[0][0], bridges[1][0], bridges[0][1] + bridges[1][1]]) #print 'bridge_bridge connexion (' + str(bridges[0][0]) + ', ' + str(bridges[1][0]) + ')' self.create_bridge(neighbours, bridges, lumid=lumen) # Lumen had more than two connections -> becomes a bridge, the resistances remain the same but the connections are changed to connections to a bridge if(len(neighbours) + len(bridges) > 2): self.create_bridge(neighbours, bridges, lumid=lumen) return; def remove_link(self, lumen_1, lumen_2): """ Removes a connection between two lumen Input ----- self : network object Needs to be called by a class object lumen_1 : int First lumen of the connection lumen_2 : Second lumen of the connection Returns ------- no return """ # Due to data structure first lumen must be smaller than second lumen if(lumen_1 > lumen_2): n = lumen_1 lumen_1 = lumen_2 lumen_2 = n # Find connection in lumen_lumen file and remove it line = 0 # For each line in lumen_lumen until connection is found while (line < len(self.lumen_lumen)): # If connection is found removing it 
if(self.lumen_lumen[line][0] == lumen_1 and self.lumen_lumen[line][1] == lumen_2): event = 'link lumen %d to lumen %d removed' % (lumen_1, lumen_2) #print event self.save_event(event) link = [lumen_1, lumen_2, self.lumen_lumen[line][2]] self.lumen_lumen.remove(link) break; # Look at next line else: line += 1 ############################################################################################################################ ###################################################### Get Functions ##################################################### ############################################################################################################################ def get_empty_lumen(self): """ Gets the IDs of the empty lumen Empty means that the area is smaller than the tube_radius^2 Input ----- self : network object Needs to be called by a class object Returns ------- empty_lumen_list : int list Contains the IDs of the empty lumens """ empty_lumen_list = [] # For each lumen ID for i in range(len(self.area)): # If area is smaller than the treshhold if(self.area[i] < self.tube_radius ** 2 and self.empty_list[i] == 0): self.empty_list[i] = 1 self.area[i] = 0 empty_lumen_list.append(i) return empty_lumen_list def get_neighbours(self, lumen): """ Gets the lumen that are directly connected to the lumen passed on and deletes the connections Input ----- self : network object Needs to be called by a class object lumen : int ID of a lumen Returns ------- neighbour_list : int list ID of all lumen that are directly connected to the lumen passed on """ neighbour_list = [] line = 0 # Going through links in lumen_lumen.dat while line < len(self.lumen_lumen) and self.lumen_lumen[line][0] < lumen : if self.lumen_lumen[line][1] == lumen : neighbour_list.append([self.lumen_lumen[line][0], self.lumen_lumen[line][2]]) event = 'link lumen %d to lumen %d removed' % (self.lumen_lumen[line][0], lumen) self.save_event(event) link = [self.lumen_lumen[line][0], 
self.lumen_lumen[line][1], self.lumen_lumen[line][2]] self.lumen_lumen.remove(link) else : line += 1 while line < len(self.lumen_lumen) and self.lumen_lumen[line][0] < lumen : line += 1 while(line < len(self.lumen_lumen) and self.lumen_lumen[line][0] == lumen): neighbour_list.append([self.lumen_lumen[line][1], self.lumen_lumen[line][2]]) event = 'link lumen %d to lumen %d removed' % (lumen, self.lumen_lumen[line][1]) self.save_event(event) link = [self.lumen_lumen[line][0], self.lumen_lumen[line][1], self.lumen_lumen[line][2]] self.lumen_lumen.remove(link) return neighbour_list def get_bridges(self, lumen): """ Gets the bridges that are directly connected to the lumen passed on Input ----- self : network object Needs to be called by a class object lumen : int ID of a lumen Returns ------- neighbour_list : int list ID of all lumen that are directly connected to the lumen passed on """ bridge_list = [] line = 0 # Going through the links in bridge_lumen.dat while(line < len(self.bridge_lumen)): if (self.bridge_lumen[line][1] == lumen): bridge_list.append([self.bridge_lumen[line][0], self.bridge_lumen[line][2]]) event = 'link bridge %d to lumen %d removed' % (self.bridge_lumen[line][0], lumen) self.save_event(event) self.bridge_lumen.remove(self.bridge_lumen[line]) else: line += 1 return bridge_list ############################################################################################################################ #################################################### Creating Functions ################################################### ############################################################################################################################ def create_link(self, link): """ Creates a link between two lumen in lumen_lumen.dat Input ----- self : network object Needs to be called by a class object link : float array [ID lumen1, ID lumen2, length] Returns ------- no return """ # no self-loops allowed if(len(link) == 4 and link[0] != link[1]): # 
Ensuring: lumen_1 < lumen_2 if(link[0] < link[2]): lumen_1 = link[0] lumen_2 = link[1] else: lumen_1 = link[1] lumen_2 = link[0] length = link[2] line = 0 # Finding line in lumen_lumen.dat, to keep the sorting while(line < len(self.lumen_lumen) and lumen_1 > self.lumen_lumen[line][0]): line += 1 if(line < len(self.lumen_lumen) - 1): while(line < len(self.lumen_lumen) and lumen_2 > self.lumen_lumen[line][1] and lumen_1 == self.lumen_lumen[line][0]): line += 1 # Creating the link in lumen_lumen.dat self.lumen_lumen.append([lumen_1,lumen_2, length]) self.lumen_lumen.sort() event = 'link lumen %d to lumen %d created' % (lumen_1,lumen_2) self.save_event(event) return; def create_bridge_lumen(self, link): """ Creates a link between a lumen and a bridge in bridge_lumen.dat Input ----- self : network object Needs to be called by a class object link : float array [ID bridge, ID lumen, length] Returns ------- no return """ bridge = link[0] lumen = link[1] length = link[2] line = 0 # Creating the link in bridge_lumen.dat self.bridge_lumen.append(link) self.bridge_lumen.sort() event = 'link bridge %d to lumen %d created' % (bridge,lumen) self.save_event(event) return; def create_bridge_bridge(self, link): """ Creates a link between two bridges in bridge_bridge.dat Input ----- self : network object Needs to be called by a class object link : float array [ID bridge1, ID bridge2, length] Returns ------- no return """ if(link[0] == link[1]): return; if(link[0] < link[1]): bridge_1 = link[0] bridge_2 = link[1] else: bridge_1 = link[1] bridge_2 = link[0] length = link[2] line = 0 # Creating the link in bridge_bridge.dat self.bridge_bridge.append([bridge_1,bridge_2, length]) self.bridge_bridge.sort() event = 'link bridge %d to bridge %d created' % (bridge_1,bridge_2) self.save_event(event) return; def create_bridge(self, lumen, bridge, lumid): """ Creates a new bridge connected with the lumen and bridges passed on Input ----- self : network object Needs to be called by a class object 
lumen : int list [[lumen ID, length], [lumen ID, length],.....] lumen IDs to which the new bridge should be connected to bridge : int list [[bridge ID, length], [bridge ID, length],.....] bridge IDs to which the new bridge should be connected to Returns ------- no return """ ##### bridge_conversionfile = os.path.join(self.network_folder,'bridgesconversion.txt') # ID of the new bridge bridge_number = self.num_bridges # Bridge ID counter, contains the ID of the next new bridge self.num_bridges += 1 event = 'new bridge %d' % (bridge_number) + ' (' + str(lumid) + ')' self.save_event(event) line = 0 lumen.sort() bridge.sort() # For each lumen that should be connected to the new bridge for i in range(len(lumen)): new_link = [bridge_number, lumen[i][0], lumen[i][1]] # Create link in bridge_lumen.dat self.create_bridge_lumen(new_link) # For each lumen that should be connected to the new bridge for i in range(len(bridge)): new_link = [bridge[i][0], bridge_number, bridge[i][1]] # Create link in bridge_bridge.dat self.create_bridge_bridge(new_link) open(bridge_conversionfile, 'a').write(str(bridge_number) + ' ' + str(lumid)+ '\n') return; ############################################################################################################################ ################################ Geometric Functions for area and Pressure ############################################### ############################################################################################################################ def set_theta(self): """ Sets the angle theta Calculates the angle theta, angle between the lumen and the tube Input ----- self : network object Needs to be called by a class object Returns ------- theta : float list Theta value for each lumen """ theta = [] for i in range(len(self.alpha)): #cos = (2*self.alpha[i]-(4*self.alpha[i]**2-self.delta[i]**2+1)/(4*self.alpha[i]))/self.delta[i] ## Old version, for assymmetric lumen #theta.append(math.acos(cos)) 
theta.append(np.arccos(self.alpha[i])) return theta; def set_area_factor(self): """ Sets the area factor, needed to express the pressure in terms of the area instead of the curvature radius Input ----- self : network object Needs to be called by a class object Returns ------- area_factor : float list Area factor for each lumen """ area_factor = [] for i in range(len(self.alpha)): area_factor.append(np.sqrt((2*self.theta[i]-np.sin(2*self.theta[i])))) return area_factor; def opening_radius(self, lumen): """ Calculates the length/2 parallel to the 'tube' where the membrane is not attached for a given lumen Input ----- lumen : int ID of the lumen Returns ------- radius : float Length/2 of the opening radius """ return np.sqrt(2*self.area[lumen]/(2*self.theta[lumen]-np.sin(2*self.theta[lumen])))*np.sin(self.theta[lumen]) def get_area(self, lumen): """ Calculates the area in one half of the lumen (for symmetric lumen) Input ----- lumen : int ID of the lumen Returns ------- area : float Area/2 of the lumen """ area = self.area[lumen] return area def pressure(self,lumen): """ Calculates the pressure inside the lumen (for symmetric lumen) Input ----- lumen : int ID of the lumen Returns ------- pressure : float Pressure of the lumen """ area = self.get_area(lumen) # Avoid dividing by zero if(area < 0.1 * self.tube_radius**2 ): error = 'division by zero in pressure: lumen ID: %d' % (lumen) self.save_error(error) pressure = self.gamma_lumen[lumen]*self.area_factor[lumen]/np.sqrt(area) return pressure ############################################################################################################################ ################################################# Reading Functions ######################################################## ############################################################################################################################ def read_lumen_lumen(self, lumen_lumen_file): """ Reading the file with links between two lumens Input ----- 
lumen_lumen_file : str File path to file with the links between two lumens Returns ------- lumen_lumen : float list [lumen1, lumen2, length] Information about the links between two lumens """ if (os.path.getsize(lumen_lumen_file)>0): # If the file is not empty lumen_1, lumen_2 = np.loadtxt(lumen_lumen_file, dtype = int, usecols = [0,1], unpack = True) length = np.loadtxt(lumen_lumen_file, dtype = float, usecols = [2]) lumen_lumen = np.column_stack([lumen_1, lumen_2, length]).tolist() else: lumen_lumen = [] return lumen_lumen def read_bridge_lumen(self, bridge_lumen_file): """ Reading the file with links between bridge and lumen Input ----- bridge_lumen_file : str File path to file with the links between bridge and lumen Returns ------- bridge_lumen : float list [bridge, lumen, length] Information about the links between bridge and lumen num_bridges : int Number of bridge_lumen links """ with open(bridge_lumen_file, 'r') as f: lines = f.read().splitlines() last_line = lines[-1] if ('#' in last_line): # If the file is empty bridge_lumen = [] num_bridges = 0 # number of existing bridges else: bridge, lumen = np.loadtxt(bridge_lumen_file, dtype = int, usecols = [0,1], unpack = True) length = np.loadtxt(bridge_lumen_file, dtype = float, usecols = [2]) bridge_lumen = np.column_stack([bridge, lumen, length]).tolist() num_bridges = max(bridge)+1 # number of existing bridges return bridge_lumen, num_bridges def read_bridge_bridge(self, bridge_bridge_file, num_bridges): """ Reading the file with links between two bridge Input ----- bridge_bridge_file : str File path to file with the links between two bridge Returns ------- bridge_bridge : float list [bridge1, bridge2, length] Information about the links between two bridge num : int Number of bridge_bridge links """ with open(bridge_bridge_file, 'r') as f: lines = f.read().splitlines() last_line = lines[-1] if ('#' in last_line>0): # If the file is empty bridge_bridge = [] num = num_bridges else: bridge1, bridge2 = 
np.loadtxt(bridge_bridge_file, dtype = int, usecols = [0,1], unpack = True) length = np.loadtxt(bridge_bridge_file, dtype = float, usecols = [2]) bridge_bridge = np.column_stack([bridge1, bridge2, length]).tolist() if (max(bridge2)+1 > num_bridges): num = max(bridge2)+1 return bridge_bridge, num ############################################################################################################################ ################################################# Output functions ######################################################### ############################################################################################################################ def save_event(self, event, start = False, out_path = ''): """ Saves each event in the output folder in the file event.dat Events like a lumen disappearing, reconnections in the graph Input ----- event : str Message of the event start : boolean True: File is created False: the message is stored in the file Returns ------ no return """ if(start): header_event = '# Saves each event during the simulation; event is a disappearing lumen, graph reconnection \n' self.file_event = os.path.join(out_path, 'event.dat') fevent = open(self.file_event, 'w') fevent.write(header_event) fevent.close() else: fevent = open(self.file_event, 'a') fevent.write('%.5f' % self.current_time) fevent.write(' ') fevent.write(event) fevent.write('\n') fevent.close() return; def save_error(self, error, start = False, out_path = ''): """ Saves errors in the output folder in the file error.dat Errors like volume loss Input ----- error : string Message of the event start : boolean True: File is created False: the message is stored in the file Returns ------ no return """ if(start): header_error = '# Saves each warning like volume loss \n' self.file_error = os.path.join(out_path, 'error.dat') ferror = open(self.file_error, 'w') ferror.write(header_error) ferror.close() else: ferror = open(self.file_error, 'a') ferror.write('%.5f' % 
self.current_time) ferror.write(' ') ferror.write(error) ferror.write('\n') ferror.close() return; def save_area(self, start = False, out_path = ''): """ Saves the volume evolution in the output folder in the file area.dat Input ----- start : boolean True: File is created False: the message is stored in the file Returns ------ no return """ if(start): header_volume = '# Saves the volume evolution of each lumen for the time step %f \n' %(self.time_step) self.file_area = os.path.join(out_path, 'area.dat') farea = open(self.file_area, 'w') farea.write(header_volume) farea.close() self.t_old = 0 else: farea = open(self.file_area, 'a') farea.write('%.5f' % self.current_time) farea.write(' ') farea.write(' '.join(map(str, self.area))) farea.write('\n') farea.close() return; ############################################################################################################################ ################################################# Swelling functions ####################################################### ############################################################################################################################ def swelling(self, lumen) : """ self.swelling(lumen) Calculates the input flux for the area fo a given lumen, due to swelling. Input ----- lumen : int Index of the lumen """ area = self.get_area(lumen) theta = self.theta[lumen] flux_swelling = self.swelling_rate * 4 * theta * np.sqrt(area)/ self.area_factor[lumen] #print flux_swelling return flux_swelling
# Library for the dynamics of a lumen network
# The lumen are 2 dimensional and symmetric and connected with 1 dimensional tubes
#
# Created by <NAME>, 2018
# Modified by <NAME>--Serandour on 8/04/2019

"""
network.py conf.init

Defines the class network and associated functions

Imports
-------
Libraries : numpy, os, math

Created by <NAME>
Modified by <NAME> on 8/06/2018
Modified by <NAME>--Serandour on 8/04/2019
"""

import numpy as np
import math
import os


class network:

    def __init__(self, network_folder, out_path, t_step, tube_radius=0.01, friction=1, swelling=False, swelling_rate=0., save_area_dat=False):
        """
        Initialization of the object network.

        All properties needed for the simulation are read and initialized.

        Input
        -----
        network_folder : str
            Folder containing lumen.dat, lumen_lumen.dat, bridge_lumen.dat
            and bridge_bridge.dat.
        out_path : str, path-like
            Output folder for area.dat, event.dat and error.dat.
        t_step : float
            Time step of the simulation. Note that if the simulation is
            adaptative, this time step will change.
        tube_radius : float, optional, default = 0.01
            Radius of the tube connecting lumens. Defines the condition for
            empty lumens (area < tube_radius ** 2).
        friction : float, optional, default = 1
            Friction constant for the fluid circulating through pipes.
        swelling : bool, optional, default = False
            True if swelling is included, False otherwise.
        swelling_rate : float, optional, default = 0.
            Swelling rate used when swelling is considered. Make sure the rate
            is not too big to avoid non-converging simulations.
        save_area_dat : bool, optional, default = False
            True if areas are saved in area.dat, False otherwise.
        """
        self.network_folder = network_folder

        # Reading properties of the lumen
        self.gamma_lumen, self.gamma_contact, self.area = np.loadtxt(
            os.path.join(network_folder, 'lumen.dat'),
            dtype=float, usecols=[0, 2, 3], unpack=True)

        # Reading links between two lumen
        self.lumen_lumen = self.read_lumen_lumen(os.path.join(network_folder, 'lumen_lumen.dat'))

        # Reading links between bridge and lumen
        self.bridge_lumen, self.num_bridges = self.read_bridge_lumen(os.path.join(network_folder, 'bridge_lumen.dat'))

        # Reading links between two bridges
        self.bridge_bridge, self.num_bridges = self.read_bridge_bridge(os.path.join(network_folder, 'bridge_bridge.dat'), self.num_bridges)

        # Surface tension ratio
        self.alpha = self.gamma_contact / (2 * self.gamma_lumen)
        # Possibility of asymmetric lumen is not included
        self.delta = np.full(len(self.alpha), 1)

        # Resistances
        self.tube_radius = tube_radius  # radius of the tube connecting the lumen and the bridges
        self.friction = friction        # friction coefficient; friction * length = resistance

        # Opening angle of the lumen (angle between curvature and tube)
        self.theta = self.set_theta()
        # Area factor for expressing the pressure in terms of the area instead of the radius
        self.area_factor = self.set_area_factor()

        # Ending time: time at which only one lumen is remaining
        self.end_time = 0
        # Time step for the output of the area evolution
        self.time_step = t_step

        # Creating output files for the area evolution, events, error messages
        self.save_area(start=True, out_path=out_path)
        self.save_event('', start=True, out_path=out_path)
        self.save_error('', start=True, out_path=out_path)

        # Area distribution after only one lumen is remaining
        self.final_area = []
        # Current time step of the simulation
        self.current_time = 0
        # Flags of empty lumen (1 means area < tube_radius ** 2)
        self.empty_list = np.zeros(len(self.alpha))

        # Swelling
        self.swelling_bool = swelling
        self.swelling_rate = swelling_rate

        # Save area
        self.save_area_dat = save_area_dat

    ############################################################################
    # Dynamics
    ############################################################################

    def flux(self, t, state):
        """
        Determines the flux / area change for each lumen of the network.

        Main function of network.py; meant to be used as the right-hand side
        of an ODE integrator.

        Input
        -----
        t : float
            Actual time step (not needed for the calculation of the flux, but
            required by the integration method in network_simulation.py).
        state : float array
            The current area of the lumens.

        Returns
        -------
        flux : float array
            Contains the area change for each lumen in dt (index == lumen ID).
        """
        flux = []
        self.current_time = t
        for _ in range(len(self.alpha)):
            flux.append(0)

        # If only one lumen remains -> end of simulation, flux stays zero
        # (needed because the integration method has no dynamic stop).
        if np.sum(self.empty_list) >= len(self.alpha) - 1:
            if self.end_time == 0:
                # Setting the end time for the output file area.log
                self.end_time = t
        else:
            # Adapting network to new state: empty lumen are removed and the
            # graph is reconnected.
            self.area = state
            self.remove_empty_lumen()
            # Area change between directly connected lumen
            flux = self.flux_lumen(flux)
            # Artificial pressure at each bridge; linear system with
            # flux(bridge) = 0, so a bridge does not gain or lose area.
            pressure_bridges = self.pressure_bridges()
            # Area change between lumen and bridges
            flux = self.flux_bridges(flux, pressure_bridges)
            # Area change due to swelling
            if self.swelling_bool:
                flux = self.flux_swelling(flux)

        # Saving area for the time step given in the configuration file
        # NOTE(review): indentation lost in the source; the save call is
        # reconstructed at method level (runs every call) -- confirm.
        if self.save_area_dat:
            self.save_area()
        self.t_old = t

        # Sanity check: total area must be conserved by the pipe fluxes.
        if np.abs(np.sum(flux)) > self.tube_radius ** 2:
            error = 'total flux is non-zero: total flux = %f' % (np.sum(flux))
            self.save_error(error)
        return flux

    def flux_lumen(self, flux):
        """
        Adds the area change caused by direct lumen-lumen connections.

        Input
        -----
        flux : float array
            Area change for each lumen; index = lumen ID.

        Returns
        -------
        flux : float array
            Same vector with the lumen-lumen contributions added.
        """
        for line in range(len(self.lumen_lumen)):
            lumen_1 = int(self.lumen_lumen[line][0])  # first lumen
            lumen_2 = int(self.lumen_lumen[line][1])  # second lumen
            # Flux from lumen 2 to lumen 1
            fl = (self.pressure(lumen_2) - self.pressure(lumen_1)) * self.friction / self.lumen_lumen[line][2]
            flux[lumen_1] += fl
            flux[lumen_2] -= fl
        return flux

    def pressure_bridges(self):
        """
        Determines the pressure at each bridge.

        For each bridge the total flux is 0 (a bridge neither gains nor loses
        area), which yields a linear equation system. Connections are taken
        from bridge_lumen.dat and bridge_bridge.dat. See the code
        documentation for the equations.

        Returns
        -------
        pressure_bridges : float array
            Pressure at each bridge.
        """
        # Sum of the resistances around one bridge
        R_sum = np.zeros(self.num_bridges, dtype=float)
        # Sum of pressure/resistance between one bridge and all connected lumen
        P_over_R_sum = np.zeros(self.num_bridges, dtype=float)
        # Matrix of the linear system for the bridge pressures
        matrix_bridges = np.zeros([self.num_bridges, self.num_bridges], dtype=float)

        # For each connection between bridge and lumen
        for line in self.bridge_lumen:
            bridge = int(line[0])
            lumen = int(line[1])
            R_sum[bridge] += 1. / line[2] * self.friction
            P_over_R_sum[bridge] += self.pressure(lumen) / line[2] * self.friction

        # For each connection between bridge and bridge
        for line in self.bridge_bridge:
            bridge1 = int(line[0])
            bridge2 = int(line[1])
            matrix_bridges[bridge1][bridge2] = 1. / line[2] * self.friction
            matrix_bridges[bridge2][bridge1] = 1. / line[2] * self.friction
            R_sum[bridge1] += 1. / line[2] * self.friction
            R_sum[bridge2] += 1. / line[2] * self.friction

        for line in range(self.num_bridges):
            matrix_bridges[line][line] = -R_sum[line]

        # Solving the linear problem for the pressure at each bridge
        pressure_bridges = np.linalg.solve(matrix_bridges, -P_over_R_sum)
        return pressure_bridges

    def flux_bridges(self, flux, pressure_bridges):
        """
        Adds the area change caused by lumen-bridge connections.

        Input
        -----
        flux : float array
            Area change for each lumen; index = lumen ID.
        pressure_bridges : float array
            Pressure at each bridge, from pressure_bridges().

        Returns
        -------
        flux : float array
            Same vector with the bridge-lumen contributions added.
        """
        # Area change in one bridge; should be 0; calculated as control value
        flux_bridge = np.zeros(self.num_bridges, dtype=float)

        # For each connection between bridge and bridge
        for line in self.bridge_bridge:
            bridge1 = int(line[0])
            bridge2 = int(line[1])
            fb = (pressure_bridges[bridge2] - pressure_bridges[bridge1]) * self.friction / line[2]
            flux_bridge[bridge1] += fb
            flux_bridge[bridge2] -= fb

        # For each connection between bridge and lumen
        for line in self.bridge_lumen:
            bridge = int(line[0])
            lumen = int(line[1])
            fl = (pressure_bridges[bridge] - self.pressure(lumen)) * self.friction / line[2]
            flux[lumen] += fl
            flux_bridge[bridge] -= fl

        for i in range(len(flux_bridge)):
            if np.abs(flux_bridge[i]) > self.tube_radius ** 2:
                error = 'total flux of bridge %d is non-zero: total flux = %f' % (i, flux_bridge[i])
                self.save_error(error)
        return flux

    def flux_swelling(self, flux):
        """
        Adds the area change caused by swelling.

        Input
        -----
        flux : float array
            Area change for each lumen; index = lumen ID.

        Returns
        -------
        flux : float array
            Same vector with the swelling contributions added.
        """
        # For each lumen (lumen is the index of the lumen's area)
        for lumen in range(len(self.area)):
            # Only non-empty lumens swell
            if not self.area[lumen] < 2 * self.tube_radius ** 2:
                flux[lumen] += self.swelling(lumen)
        return flux

    ############################################################################
    # Removing Functions
    ############################################################################

    def remove_empty_lumen(self):
        """
        Determines and removes empty lumen.

        Collects the list of empty lumen and passes it on so they are removed
        and the network is reconnected.

        Returns
        -------
        no return
        """
        empty_lumen_list = self.get_empty_lumen()
        # Removing empty lumen and reconnecting the network
        if len(empty_lumen_list) > 0:
            event = 'empty lumen: ' + ' '.join(map(str, empty_lumen_list))
            self.save_event(event)
            self.remove_lumen(empty_lumen_list)
        return

    def remove_lumen(self, lumen_to_remove):
        """
        Removes the given lumen and connects their former neighbours.

        Input
        -----
        lumen_to_remove : int list
            List of lumen IDs to be removed.

        Returns
        -------
        no return
        """
        for lumen in lumen_to_remove:
            neighbours = self.get_neighbours(lumen)  # connected lumen (links deleted)
            bridges = self.get_bridges(lumen)        # connected bridges (links deleted)
            self.save_event('lumen ' + str(lumen) + ' neighbours ' + str(neighbours))
            self.save_event('lumen ' + str(lumen) + ' bridges ' + str(bridges))

            # Lumen had two connections: it disappears and the two connected
            # parts get directly connected; the new resistance is the sum of
            # the two previous ones.
            if len(neighbours) + len(bridges) == 2:
                # Connected to two lumen -> new lumen-lumen link
                if len(neighbours) == 2:
                    self.create_link([neighbours[0][0], neighbours[1][0], neighbours[0][1] + neighbours[1][1]])
                # Connected to a lumen and a bridge -> new bridge-lumen link
                if len(neighbours) == 1 and len(bridges) == 1:
                    self.create_bridge_lumen([bridges[0][0], neighbours[0][0], bridges[0][1] + neighbours[0][1]])
                # Connected to two bridges -> new bridge-bridge link
                if len(bridges) == 2:
                    self.create_bridge_bridge([bridges[0][0], bridges[1][0], bridges[0][1] + bridges[1][1]])
                    # NOTE(review): in the source a create_bridge call follows
                    # the bridge-bridge reconnection at this position; it looks
                    # redundant (the parts are already reconnected) -- confirm
                    # against the original indentation.
                    self.create_bridge(neighbours, bridges, lumid=lumen)

            # Lumen had more than two connections -> it becomes a bridge; the
            # resistances remain, but the links become links to a bridge.
            if len(neighbours) + len(bridges) > 2:
                self.create_bridge(neighbours, bridges, lumid=lumen)
        return

    def remove_link(self, lumen_1, lumen_2):
        """
        Removes a connection between two lumen.

        Input
        -----
        lumen_1 : int
            First lumen of the connection.
        lumen_2 : int
            Second lumen of the connection.

        Returns
        -------
        no return
        """
        # Due to the data structure the first lumen ID must be the smaller one
        if lumen_1 > lumen_2:
            lumen_1, lumen_2 = lumen_2, lumen_1

        # Find the connection in lumen_lumen and remove it
        line = 0
        while line < len(self.lumen_lumen):
            if self.lumen_lumen[line][0] == lumen_1 and self.lumen_lumen[line][1] == lumen_2:
                event = 'link lumen %d to lumen %d removed' % (lumen_1, lumen_2)
                self.save_event(event)
                link = [lumen_1, lumen_2, self.lumen_lumen[line][2]]
                self.lumen_lumen.remove(link)
                break
            else:
                line += 1

    ############################################################################
    # Get Functions
    ############################################################################

    def get_empty_lumen(self):
        """
        Gets the IDs of the empty lumen.

        Empty means that the area is smaller than tube_radius ** 2. Newly
        empty lumen are flagged in self.empty_list and their area is zeroed.

        Returns
        -------
        empty_lumen_list : int list
            IDs of the newly empty lumens.
        """
        empty_lumen_list = []
        for i in range(len(self.area)):
            # If the area is below the threshold and not yet flagged
            if self.area[i] < self.tube_radius ** 2 and self.empty_list[i] == 0:
                self.empty_list[i] = 1
                self.area[i] = 0
                empty_lumen_list.append(i)
        return empty_lumen_list

    def get_neighbours(self, lumen):
        """
        Gets the lumen directly connected to the given lumen and deletes
        those connections.

        Input
        -----
        lumen : int
            ID of a lumen.

        Returns
        -------
        neighbour_list : list of [ID, length]
            All lumen that were directly connected to the given lumen.
        """
        neighbour_list = []
        line = 0
        # Links are sorted: first scan entries with smaller first ID
        while line < len(self.lumen_lumen) and self.lumen_lumen[line][0] < lumen:
            if self.lumen_lumen[line][1] == lumen:
                neighbour_list.append([self.lumen_lumen[line][0], self.lumen_lumen[line][2]])
                event = 'link lumen %d to lumen %d removed' % (self.lumen_lumen[line][0], lumen)
                self.save_event(event)
                link = [self.lumen_lumen[line][0], self.lumen_lumen[line][1], self.lumen_lumen[line][2]]
                self.lumen_lumen.remove(link)
            else:
                line += 1
        # Skip any remaining entries with smaller first ID
        while line < len(self.lumen_lumen) and self.lumen_lumen[line][0] < lumen:
            line += 1
        # Entries where the given lumen is the first ID
        while line < len(self.lumen_lumen) and self.lumen_lumen[line][0] == lumen:
            neighbour_list.append([self.lumen_lumen[line][1], self.lumen_lumen[line][2]])
            event = 'link lumen %d to lumen %d removed' % (lumen, self.lumen_lumen[line][1])
            self.save_event(event)
            link = [self.lumen_lumen[line][0], self.lumen_lumen[line][1], self.lumen_lumen[line][2]]
            self.lumen_lumen.remove(link)
        return neighbour_list

    def get_bridges(self, lumen):
        """
        Gets the bridges directly connected to the given lumen and deletes
        those connections.

        Input
        -----
        lumen : int
            ID of a lumen.

        Returns
        -------
        bridge_list : list of [ID, length]
            All bridges that were directly connected to the given lumen.
        """
        bridge_list = []
        line = 0
        while line < len(self.bridge_lumen):
            if self.bridge_lumen[line][1] == lumen:
                bridge_list.append([self.bridge_lumen[line][0], self.bridge_lumen[line][2]])
                event = 'link bridge %d to lumen %d removed' % (self.bridge_lumen[line][0], lumen)
                self.save_event(event)
                self.bridge_lumen.remove(self.bridge_lumen[line])
            else:
                line += 1
        return bridge_list

    ############################################################################
    # Creating Functions
    ############################################################################

    def create_link(self, link):
        """
        Creates a link between two lumen in lumen_lumen.

        Input
        -----
        link : float array
            [ID lumen1, ID lumen2, length].

        Returns
        -------
        no return
        """
        # BUG FIX: the source required len(link) == 4, although every caller
        # passes [lumen1, lumen2, length]; links were silently dropped. It
        # also ordered the IDs with link[0] < link[2] (ID vs length).
        # No self-loops allowed.
        if len(link) == 3 and link[0] != link[1]:
            # Ensuring lumen_1 < lumen_2
            if link[0] < link[1]:
                lumen_1, lumen_2 = link[0], link[1]
            else:
                lumen_1, lumen_2 = link[1], link[0]
            length = link[2]
            # Append and sort; sorting keeps the list ordering invariant.
            self.lumen_lumen.append([lumen_1, lumen_2, length])
            self.lumen_lumen.sort()
            event = 'link lumen %d to lumen %d created' % (lumen_1, lumen_2)
            self.save_event(event)
        return

    def create_bridge_lumen(self, link):
        """
        Creates a link between a lumen and a bridge in bridge_lumen.

        Input
        -----
        link : float array
            [ID bridge, ID lumen, length].

        Returns
        -------
        no return
        """
        bridge = link[0]
        lumen = link[1]
        self.bridge_lumen.append(link)
        self.bridge_lumen.sort()
        event = 'link bridge %d to lumen %d created' % (bridge, lumen)
        self.save_event(event)
        return

    def create_bridge_bridge(self, link):
        """
        Creates a link between two bridges in bridge_bridge.

        Input
        -----
        link : float array
            [ID bridge1, ID bridge2, length].

        Returns
        -------
        no return
        """
        # No self-loops allowed
        if link[0] == link[1]:
            return
        # Ensuring bridge_1 < bridge_2
        if link[0] < link[1]:
            bridge_1, bridge_2 = link[0], link[1]
        else:
            bridge_1, bridge_2 = link[1], link[0]
        length = link[2]
        self.bridge_bridge.append([bridge_1, bridge_2, length])
        self.bridge_bridge.sort()
        event = 'link bridge %d to bridge %d created' % (bridge_1, bridge_2)
        self.save_event(event)
        return

    def create_bridge(self, lumen, bridge, lumid):
        """
        Creates a new bridge connected with the lumen and bridges passed on.

        Input
        -----
        lumen : int list
            [[lumen ID, length], ...] lumen IDs to connect to the new bridge.
        bridge : int list
            [[bridge ID, length], ...] bridge IDs to connect to the new bridge.
        lumid : int
            ID of the lumen the new bridge replaces (logged for conversion).

        Returns
        -------
        no return
        """
        bridge_conversionfile = os.path.join(self.network_folder, 'bridgesconversion.txt')

        # ID of the new bridge; counter holds the ID of the next new bridge
        bridge_number = self.num_bridges
        self.num_bridges += 1

        event = 'new bridge %d' % (bridge_number) + ' (' + str(lumid) + ')'
        self.save_event(event)

        lumen.sort()
        bridge.sort()

        # Connect each lumen to the new bridge
        for i in range(len(lumen)):
            self.create_bridge_lumen([bridge_number, lumen[i][0], lumen[i][1]])
        # Connect each bridge to the new bridge
        for i in range(len(bridge)):
            self.create_bridge_bridge([bridge[i][0], bridge_number, bridge[i][1]])

        # FIX: close the conversion log file (was an unclosed open().write())
        with open(bridge_conversionfile, 'a') as f:
            f.write(str(bridge_number) + ' ' + str(lumid) + '\n')
        return

    ############################################################################
    # Geometric Functions for area and Pressure
    ############################################################################

    def set_theta(self):
        """
        Sets the opening angle theta between the lumen and the tube.

        Returns
        -------
        theta : float list
            Theta value for each lumen.
        """
        theta = []
        for i in range(len(self.alpha)):
            # Symmetric lumen: theta = arccos(alpha)
            theta.append(np.arccos(self.alpha[i]))
        return theta

    def set_area_factor(self):
        """
        Sets the area factor, needed to express the pressure in terms of the
        area instead of the curvature radius.

        Returns
        -------
        area_factor : float list
            Area factor for each lumen.
        """
        area_factor = []
        for i in range(len(self.alpha)):
            area_factor.append(np.sqrt(2 * self.theta[i] - np.sin(2 * self.theta[i])))
        return area_factor

    def opening_radius(self, lumen):
        """
        Calculates half the length parallel to the tube where the membrane is
        not attached, for a given lumen.

        Input
        -----
        lumen : int
            ID of the lumen.

        Returns
        -------
        radius : float
            Half of the opening radius.
        """
        return np.sqrt(2 * self.area[lumen] / (2 * self.theta[lumen] - np.sin(2 * self.theta[lumen]))) * np.sin(self.theta[lumen])

    def get_area(self, lumen):
        """
        Returns the area of one half of the lumen (for symmetric lumen).

        Input
        -----
        lumen : int
            ID of the lumen.

        Returns
        -------
        area : float
            Area/2 of the lumen.
        """
        return self.area[lumen]

    def pressure(self, lumen):
        """
        Calculates the pressure inside the lumen (for symmetric lumen).

        Input
        -----
        lumen : int
            ID of the lumen.

        Returns
        -------
        pressure : float
            Pressure of the lumen.
        """
        area = self.get_area(lumen)
        # Avoid dividing by zero
        if area < 0.1 * self.tube_radius ** 2:
            error = 'division by zero in pressure: lumen ID: %d' % (lumen)
            self.save_error(error)
        pressure = self.gamma_lumen[lumen] * self.area_factor[lumen] / np.sqrt(area)
        return pressure

    ############################################################################
    # Reading Functions
    ############################################################################

    def read_lumen_lumen(self, lumen_lumen_file):
        """
        Reads the file with links between two lumens.

        Input
        -----
        lumen_lumen_file : str
            File path to the lumen-lumen link file.

        Returns
        -------
        lumen_lumen : float list
            [lumen1, lumen2, length] for each link.
        """
        if os.path.getsize(lumen_lumen_file) > 0:  # file not empty
            lumen_1, lumen_2 = np.loadtxt(lumen_lumen_file, dtype=int, usecols=[0, 1], unpack=True)
            length = np.loadtxt(lumen_lumen_file, dtype=float, usecols=[2])
            # FIX: a single-line file yields 0-d arrays; promote to 1-d
            lumen_1 = np.atleast_1d(lumen_1)
            lumen_2 = np.atleast_1d(lumen_2)
            length = np.atleast_1d(length)
            lumen_lumen = np.column_stack([lumen_1, lumen_2, length]).tolist()
        else:
            lumen_lumen = []
        return lumen_lumen

    def read_bridge_lumen(self, bridge_lumen_file):
        """
        Reads the file with links between bridge and lumen.

        Input
        -----
        bridge_lumen_file : str
            File path to the bridge-lumen link file.

        Returns
        -------
        bridge_lumen : float list
            [bridge, lumen, length] for each link.
        num_bridges : int
            Number of existing bridges.
        """
        with open(bridge_lumen_file, 'r') as f:
            lines = f.read().splitlines()
        last_line = lines[-1]
        if '#' in last_line:  # only the header line -> file is empty
            bridge_lumen = []
            num_bridges = 0
        else:
            bridge, lumen = np.loadtxt(bridge_lumen_file, dtype=int, usecols=[0, 1], unpack=True)
            length = np.loadtxt(bridge_lumen_file, dtype=float, usecols=[2])
            # FIX: a single-line file yields 0-d arrays; promote to 1-d
            bridge = np.atleast_1d(bridge)
            lumen = np.atleast_1d(lumen)
            length = np.atleast_1d(length)
            bridge_lumen = np.column_stack([bridge, lumen, length]).tolist()
            num_bridges = int(max(bridge)) + 1
        return bridge_lumen, num_bridges

    def read_bridge_bridge(self, bridge_bridge_file, num_bridges):
        """
        Reads the file with links between two bridges.

        Input
        -----
        bridge_bridge_file : str
            File path to the bridge-bridge link file.
        num_bridges : int
            Number of bridges known so far.

        Returns
        -------
        bridge_bridge : float list
            [bridge1, bridge2, length] for each link.
        num : int
            Updated number of bridges.
        """
        with open(bridge_bridge_file, 'r') as f:
            lines = f.read().splitlines()
        last_line = lines[-1]
        # FIX: `num` could be unbound when the file held data but no new
        # bridge IDs; initialize it up front.
        num = num_bridges
        # FIX: the source tested ('#' in last_line>0), a chained comparison
        # that evaluates last_line > 0 (str vs int) and raises TypeError.
        if '#' in last_line:  # only the header line -> file is empty
            bridge_bridge = []
        else:
            bridge1, bridge2 = np.loadtxt(bridge_bridge_file, dtype=int, usecols=[0, 1], unpack=True)
            length = np.loadtxt(bridge_bridge_file, dtype=float, usecols=[2])
            # FIX: a single-line file yields 0-d arrays; promote to 1-d
            bridge1 = np.atleast_1d(bridge1)
            bridge2 = np.atleast_1d(bridge2)
            length = np.atleast_1d(length)
            bridge_bridge = np.column_stack([bridge1, bridge2, length]).tolist()
            if max(bridge2) + 1 > num_bridges:
                num = int(max(bridge2)) + 1
        return bridge_bridge, num

    ############################################################################
    # Output functions
    ############################################################################

    def save_event(self, event, start=False, out_path=''):
        """
        Saves each event (disappearing lumen, graph reconnection) in event.dat.

        Input
        -----
        event : str
            Message of the event.
        start : boolean
            True: the file is created. False: the message is appended.
        out_path : str
            Output folder (only used when start is True).

        Returns
        ------
        no return
        """
        if start:
            header_event = '# Saves each event during the simulation; event is a disappearing lumen, graph reconnection \n'
            self.file_event = os.path.join(out_path, 'event.dat')
            with open(self.file_event, 'w') as fevent:
                fevent.write(header_event)
        else:
            with open(self.file_event, 'a') as fevent:
                fevent.write('%.5f' % self.current_time)
                fevent.write(' ')
                fevent.write(event)
                fevent.write('\n')
        return

    def save_error(self, error, start=False, out_path=''):
        """
        Saves warnings (e.g. volume loss) in error.dat.

        Input
        -----
        error : str
            Message of the warning.
        start : boolean
            True: the file is created. False: the message is appended.
        out_path : str
            Output folder (only used when start is True).

        Returns
        ------
        no return
        """
        if start:
            header_error = '# Saves each warning like volume loss \n'
            self.file_error = os.path.join(out_path, 'error.dat')
            with open(self.file_error, 'w') as ferror:
                ferror.write(header_error)
        else:
            with open(self.file_error, 'a') as ferror:
                ferror.write('%.5f' % self.current_time)
                ferror.write(' ')
                ferror.write(error)
                ferror.write('\n')
        return

    def save_area(self, start=False, out_path=''):
        """
        Saves the area evolution of each lumen in area.dat.

        Input
        -----
        start : boolean
            True: the file is created. False: the current areas are appended.
        out_path : str
            Output folder (only used when start is True).

        Returns
        ------
        no return
        """
        if start:
            header_volume = '# Saves the volume evolution of each lumen for the time step %f \n' % (self.time_step)
            self.file_area = os.path.join(out_path, 'area.dat')
            with open(self.file_area, 'w') as farea:
                farea.write(header_volume)
            self.t_old = 0
        else:
            with open(self.file_area, 'a') as farea:
                farea.write('%.5f' % self.current_time)
                farea.write(' ')
                farea.write(' '.join(map(str, self.area)))
                farea.write('\n')
        return

    ############################################################################
    # Swelling functions
    ############################################################################

    def swelling(self, lumen):
        """
        Calculates the area input flux of a given lumen due to swelling.

        Input
        -----
        lumen : int
            Index of the lumen.

        Returns
        -------
        flux_swelling : float
            Swelling contribution to the lumen's area change.
        """
        area = self.get_area(lumen)
        theta = self.theta[lumen]
        flux_swelling = self.swelling_rate * 4 * theta * np.sqrt(area) / self.area_factor[lumen]
        return flux_swelling
en
0.620598
# Library for the dynamics of a lumen network # The lumen are 2 dimensional and symmetric and connected with 1 dimensional tubes # # Created by <NAME>, 2018 # Modified by <NAME>--Serandour on 8/04/2019 network.py conf.init Defines the class network and associated functions Imports ------- Libraries : numpy, os, math Created by <NAME> Modified by <NAME> on 8/06/2018 Modified by <NAME>--Serandour on 8/04/2019 Initialization of the object network All properties needed for the simulation are read and initialized Input ----- network_folder : str out_path : str, path-like t_step : float Time step of the simulation. Note that if the simulation is adaptative, this time step will change. tube_radius : float, optional, default = 0.01 Radius of the tube connecting lumens. Define the condition for empty lumens. friction : float, optional, default = 1 Friction constant for the fluid circulating through pipes. swelling : bool, optional, default = False Swelling option for the simulation. True if swelling is included, False otherwise. swelling_rate : float, optional, default = 0. Swelling rate value in case the swelling is considered. Make sure the rate is not to big to avoid non-converging simulations. save_area_dat : bool, optional, default = False Save area option. True if areas are saved in area.dat, False otherwise. 
# Reading properties of the lumen # Reading links between two lumen # Reading links between bridge and lumen # Reading links between two bridges # Surface tension ratio # Possibility of asymmetric lumen is not included # Resistances # Radius of the tube connecting the lumen and the bridges # Friction coefficient; friction * length = resistance # Opening angle of the lumen (angle between curvature and tube) # Area factor for expressing the pressure in terms of the area instead of the radius # Ending time: time at which only one lumen is remaining # Time step for the output of the area evolution # Creating output file for the area evolution, events, error messages # Area distribution after only one lumen is remaining # Current time step of the simulation # List of empty lumen (area < tube_radius **2) # Swelling # Save area ############################################################################################################################ ########################################################## Dynamics ######################################################## ############################################################################################################################ Determines the flux/ area change for each lumen of the network, main function of network.py Input ----- self : network object Needs to be called by a class object t : float Actual time step (not needed for the calculation of the flux, but required for the used integration method in network_simulation.py state : float array The current area of the lumens Returns ------- flux : float array Contains the area change for each lumen in dt # Initialization of the array containing the area change (index == lumen ID) # If only one lumen remains -> End of simulation, flux is zero (needed as for the integration method used, no dynamic stop is possible) # Setting the end time for the output file area.log # more than one lumen remaining: calculation of the flux # Adapting network to new state: 
Empty lumen are removed and graph is reconnected # Area change between directly connected lumen # Calculating artificial pressure at each bridge; linear system of equations, with flux(bridge) = 0, the bridge does not gain or loose area # Area change between lumen-bridges # Area change due to swelling # Saving area for the time step given in the configuration file Determines the flux/ area change for each lumen due to the connection between lumen and lumen Input ----- self network object needs to be called by a class object flux float array vector containing the area change for each lumen; index = lumen ID Returns ------- flux float array area changes due to lumen-lumen connection added to the vector passed # for each connection between two lumen # first lumen # second lumen # flux from lumen 2 to lumen 1 Determines the pressure at each bridge for each bridge the total flux is 0, meaning that the bridge does not gain or loose area this gives a linear equation system, which can be solved The connections are taken from the files bridge_lumen.dat and bridge_bridge.dat For Information about the equations see the documentation to the code Input ----- self : network object Needs to be called by a class object Returns ------- pressure_bridges : float array Pressure at each bridge # sum of the resistences around one bridge # sum of pressure over resistance between one bridge and all directly connected lumen # matrix to calculate the pressure at each bridge # For each connection between bridge and lumen # For each connection between bridge and bridge # Solving linear problem with the pressure at each bridge as solution Determines the flux/ area change for each lumen due to the connection between lumen and bridge Input ----- self : network object Needs to be called by a class object Returns ------- flux : float array Area changes due to bridge-lumen connection added to the vector passed # Area change in one bridge; should be 0; calculated as control value # For each 
connection between bridge and bridge # For each connection between bridge and lumen Determines the flux/ area change for each lumen due to sewlling Input ----- self : network object Needs to be called by a class object Returns ------- flux : float array Area changes due to bridge-lumen connection added to the vector passed # for each lumen (lumen is the index of the lumen's area) # if not empty # then add the swelling contribution ############################################################################################################################ ###################################################### Removing Functions ##################################################### ############################################################################################################################ Determines and removes empty lumen Calls a function to obtain a list of empty lumen and passes the list to a function to remove them and reconnect the network Input ----- self : network object Needs to be called by a class object Returns ------- no return # Creating a list of empty lumen # Removing empty lumen and reconnecting the network #print event Removes the lumen that are passed and connects the neighbors of these lumen Input ----- self : network object Needs to be called by a class object lumen_to_remove : int list List of lumen to be removed Returns ------- no return # For each lumen that has to be removed # List of connected lumen # List of connected bridges # Lumen had two connections, this means that it disappears and the two connected parts get directly connected, the resistance for the new link is the sum of the resistance of the two previous connections # Lumen was connected to two lumen -> new connection between lumen and lumen #print 'lumen_lumen connexion (' + str(neighbours[0][0]) + ', ' + str(neighbours[1][0]) + ')' # Lumen was connected to a lumen and a bridge -> new connection between lumen and bridge #print 'lumen_bridge connexion (' + 
str(bridges[0][0]) + ', ' + str(neighbours[0][0]) + ')' # Lumen was connected to two bridges -> new connection between bridge and bridge #print 'bridge_bridge connexion (' + str(bridges[0][0]) + ', ' + str(bridges[1][0]) + ')' # Lumen had more than two connections -> becomes a bridge, the resistances remain the same but the connections are changed to connections to a bridge Removes a connection between two lumen Input ----- self : network object Needs to be called by a class object lumen_1 : int First lumen of the connection lumen_2 : Second lumen of the connection Returns ------- no return # Due to data structure first lumen must be smaller than second lumen # Find connection in lumen_lumen file and remove it # For each line in lumen_lumen until connection is found # If connection is found removing it #print event # Look at next line ############################################################################################################################ ###################################################### Get Functions ##################################################### ############################################################################################################################ Gets the IDs of the empty lumen Empty means that the area is smaller than the tube_radius^2 Input ----- self : network object Needs to be called by a class object Returns ------- empty_lumen_list : int list Contains the IDs of the empty lumens # For each lumen ID # If area is smaller than the treshhold Gets the lumen that are directly connected to the lumen passed on and deletes the connections Input ----- self : network object Needs to be called by a class object lumen : int ID of a lumen Returns ------- neighbour_list : int list ID of all lumen that are directly connected to the lumen passed on # Going through links in lumen_lumen.dat Gets the bridges that are directly connected to the lumen passed on Input ----- self : network object Needs to be called by a class 
object lumen : int ID of a lumen Returns ------- neighbour_list : int list ID of all lumen that are directly connected to the lumen passed on # Going through the links in bridge_lumen.dat ############################################################################################################################ #################################################### Creating Functions ################################################### ############################################################################################################################ Creates a link between two lumen in lumen_lumen.dat Input ----- self : network object Needs to be called by a class object link : float array [ID lumen1, ID lumen2, length] Returns ------- no return # no self-loops allowed # Ensuring: lumen_1 < lumen_2 # Finding line in lumen_lumen.dat, to keep the sorting # Creating the link in lumen_lumen.dat Creates a link between a lumen and a bridge in bridge_lumen.dat Input ----- self : network object Needs to be called by a class object link : float array [ID bridge, ID lumen, length] Returns ------- no return # Creating the link in bridge_lumen.dat Creates a link between two bridges in bridge_bridge.dat Input ----- self : network object Needs to be called by a class object link : float array [ID bridge1, ID bridge2, length] Returns ------- no return # Creating the link in bridge_bridge.dat Creates a new bridge connected with the lumen and bridges passed on Input ----- self : network object Needs to be called by a class object lumen : int list [[lumen ID, length], [lumen ID, length],.....] lumen IDs to which the new bridge should be connected to bridge : int list [[bridge ID, length], [bridge ID, length],.....] 
bridge IDs to which the new bridge should be connected to Returns ------- no return ##### # ID of the new bridge # Bridge ID counter, contains the ID of the next new bridge # For each lumen that should be connected to the new bridge # Create link in bridge_lumen.dat # For each lumen that should be connected to the new bridge # Create link in bridge_bridge.dat ############################################################################################################################ ################################ Geometric Functions for area and Pressure ############################################### ############################################################################################################################ Sets the angle theta Calculates the angle theta, angle between the lumen and the tube Input ----- self : network object Needs to be called by a class object Returns ------- theta : float list Theta value for each lumen #cos = (2*self.alpha[i]-(4*self.alpha[i]**2-self.delta[i]**2+1)/(4*self.alpha[i]))/self.delta[i] ## Old version, for assymmetric lumen #theta.append(math.acos(cos)) Sets the area factor, needed to express the pressure in terms of the area instead of the curvature radius Input ----- self : network object Needs to be called by a class object Returns ------- area_factor : float list Area factor for each lumen Calculates the length/2 parallel to the 'tube' where the membrane is not attached for a given lumen Input ----- lumen : int ID of the lumen Returns ------- radius : float Length/2 of the opening radius Calculates the area in one half of the lumen (for symmetric lumen) Input ----- lumen : int ID of the lumen Returns ------- area : float Area/2 of the lumen Calculates the pressure inside the lumen (for symmetric lumen) Input ----- lumen : int ID of the lumen Returns ------- pressure : float Pressure of the lumen # Avoid dividing by zero 
############################################################################################################################ ################################################# Reading Functions ######################################################## ############################################################################################################################ Reading the file with links between two lumens Input ----- lumen_lumen_file : str File path to file with the links between two lumens Returns ------- lumen_lumen : float list [lumen1, lumen2, length] Information about the links between two lumens # If the file is not empty Reading the file with links between bridge and lumen Input ----- bridge_lumen_file : str File path to file with the links between bridge and lumen Returns ------- bridge_lumen : float list [bridge, lumen, length] Information about the links between bridge and lumen num_bridges : int Number of bridge_lumen links # If the file is empty # number of existing bridges # number of existing bridges Reading the file with links between two bridge Input ----- bridge_bridge_file : str File path to file with the links between two bridge Returns ------- bridge_bridge : float list [bridge1, bridge2, length] Information about the links between two bridge num : int Number of bridge_bridge links # If the file is empty ############################################################################################################################ ################################################# Output functions ######################################################### ############################################################################################################################ Saves each event in the output folder in the file event.dat Events like a lumen disappearing, reconnections in the graph Input ----- event : str Message of the event start : boolean True: File is created False: the message is stored in the file Returns ------ no 
return Saves errors in the output folder in the file error.dat Errors like volume loss Input ----- error : string Message of the event start : boolean True: File is created False: the message is stored in the file Returns ------ no return Saves the volume evolution in the output folder in the file area.dat Input ----- start : boolean True: File is created False: the message is stored in the file Returns ------ no return ############################################################################################################################ ################################################# Swelling functions ####################################################### ############################################################################################################################ self.swelling(lumen) Calculates the input flux for the area fo a given lumen, due to swelling. Input ----- lumen : int Index of the lumen #print flux_swelling
2.933927
3
scripts/upsampling_demo.py
always-newbie161/pyprobml
2
9890
# Illustrate upsampling in 2d # Code from <NAME> # https://machinelearningmastery.com/generative_adversarial_networks/ import tensorflow as tf from tensorflow import keras from numpy import asarray #from keras.models import Sequential from tensorflow.keras.models import Sequential #from keras.layers import UpSampling2D from tensorflow.keras.layers import UpSampling2D X = asarray([[1, 2], [3, 4]]) X = asarray([[1, 2, 3], [4, 5, 6], [7,8,9]]) print(X) nr = X.shape[0] nc = X.shape[1] # reshape input data into one sample a sample with a channel X = X.reshape((1, nr, nc, 1)) model = Sequential() model.add(UpSampling2D(input_shape=(nr, nc, 1))) # nearest neighbor yhat = model.predict(X) yhat = yhat.reshape((2*nr, 2*nc)) print(yhat) model = Sequential() model.add(UpSampling2D(input_shape=(nc, nc, 1), interpolation='bilinear')) yhat = model.predict(X) yhat = yhat.reshape((2*nr, 2*nc)) print(yhat)
# Illustrate upsampling in 2d # Code from <NAME> # https://machinelearningmastery.com/generative_adversarial_networks/ import tensorflow as tf from tensorflow import keras from numpy import asarray #from keras.models import Sequential from tensorflow.keras.models import Sequential #from keras.layers import UpSampling2D from tensorflow.keras.layers import UpSampling2D X = asarray([[1, 2], [3, 4]]) X = asarray([[1, 2, 3], [4, 5, 6], [7,8,9]]) print(X) nr = X.shape[0] nc = X.shape[1] # reshape input data into one sample a sample with a channel X = X.reshape((1, nr, nc, 1)) model = Sequential() model.add(UpSampling2D(input_shape=(nr, nc, 1))) # nearest neighbor yhat = model.predict(X) yhat = yhat.reshape((2*nr, 2*nc)) print(yhat) model = Sequential() model.add(UpSampling2D(input_shape=(nc, nc, 1), interpolation='bilinear')) yhat = model.predict(X) yhat = yhat.reshape((2*nr, 2*nc)) print(yhat)
en
0.740191
# Illustrate upsampling in 2d # Code from <NAME> # https://machinelearningmastery.com/generative_adversarial_networks/ #from keras.models import Sequential #from keras.layers import UpSampling2D # reshape input data into one sample a sample with a channel # nearest neighbor
3.241031
3
V2RaycSpider1225/src/BusinessCentralLayer/scaffold.py
njchj/V2RayCloudSpider
1
9891
<gh_stars>1-10 __all__ = ['scaffold', 'command_set'] from gevent import monkey monkey.patch_all() import csv import os import sys import time import shutil from typing import List import gevent from src.BusinessCentralLayer.setting import logger, DEFAULT_POWER, CHROMEDRIVER_PATH, \ REDIS_MASTER, SERVER_DIR_DATABASE_CACHE, SERVER_DIR_CLIENT_DEPORT, SERVER_PATH_DEPOT_VCS, SERVER_DIR_CACHE_BGPIC, \ REDIS_SLAVER_DDT, CRAWLER_SEQUENCE, terminal_echo, SERVER_DIR_DATABASE_LOG, SERVER_DIR_SSPANEL_MINING command_set = { # --------------------------------------------- # 部署接口 # --------------------------------------------- 'deploy': "部署项目(定时任务/Flask 开启与否取决于yaml配置文件)", # --------------------------------------------- # 调试接口 # --------------------------------------------- "clear": "清理系统运行缓存", "decouple": "立即唤醒一次subs_ddt链接解耦任务", "overdue": "立即执行一次过时链接清洗任务", "run": "[请使用spawn命令替代]立即执行一次采集任务(强制使用协程加速)", "force_run": "[请使用spawn命令替代]强制执行采集任务", "remain": "读取剩余订阅数量", "ping": "测试数据库连接", "entropy": "打印采集队列", "exile": "执行队列运维脚本(高饱和强阻塞任务)", "spawn": "并发执行所有在列的采集任务", "mining": "启动一次针对STAFF host的SEO全站挖掘任务", # --------------------------------------------- # 随参调试接口 # --------------------------------------------- # usage: 解析某条订阅链接 python main.py --parse https://domain/link/token?sub=3 # usage: 解析多条订阅链接 python main.py --parse https://domain/link/token?sub=3 https://domain/link/token2?sub=3 # "--parse": """解析链接。若是订阅链接,则检测节点数量并测试ping延时""", # --------------------------------------------- # Windows 功能接口 # --------------------------------------------- "panel": "[for Windows] 打开桌面前端面板", "ash": "[for Windows] 一键清洗订阅池,并将所有类型订阅转换为Clash yaml配置文件," "借由URL Scheme自动打开Clash并下载配置文件", # --------------------------------------------- # 调用示例 # --------------------------------------------- "example": "python main.py ping" } class _ConfigQuarantine: def __init__(self): self.root = [ SERVER_DIR_CLIENT_DEPORT, SERVER_PATH_DEPOT_VCS, SERVER_DIR_DATABASE_CACHE, SERVER_DIR_CACHE_BGPIC ] self.flag = False def 
set_up_file_tree(self, root): """ --/qinse/V2RaycSpider{verNum} --BCL --BLL --BVL --Database --client_depot --vcs.csv --logs --*error.log --*runtime.log --temp_cache --*AnyTempCacheFile... --*CrawlFetchHistory.txt --fake_useragent_0.1.11.json --*tests """ # 检查默认下载地址是否残缺 深度优先初始化系统文件 for child_ in root: if not os.path.exists(child_): self.flag = True try: # 初始化文件夹 if os.path.isdir(child_) or not os.path.splitext(child_)[-1]: os.mkdir(child_) logger.success(f"系统文件链接成功->{child_}") # 初始化文件 else: if child_ == SERVER_PATH_DEPOT_VCS: try: with open(child_, 'w', encoding='utf-8', newline='') as fpx: csv.writer(fpx).writerow(['version', 'title']) logger.success(f"系统文件链接成功->{child_}") except Exception as ep: logger.exception(f"Exception{child_}{ep}") except Exception as ep: logger.exception(ep) @staticmethod def check_config(call_driver: bool = False): chromedriver_not_found_error = "<ScaffoldGuider> ForceRun || ChromedriverNotFound ||" \ "未查找到chromedriver驱动,请根据技术文档正确配置\n" \ ">>> https://github.com/QIN2DIM/V2RayCloudSpider" # if not all(SMTP_ACCOUNT.values()): # logger.warning('您未正确配置<通信邮箱>信息(SMTP_ACCOUNT)') # if not SERVERCHAN_SCKEY: # logger.warning("您未正确配置<Server酱>的SCKEY") if not all([REDIS_SLAVER_DDT.get("host"), REDIS_SLAVER_DDT.get("password")]): logger.warning('您未正确配置<Redis-Slave> 本项目资源拷贝功能无法使用,但不影响系统正常运行。') if not all([REDIS_MASTER.get("host"), REDIS_MASTER.get("password")]): logger.error("您未正确配置<Redis-Master> 此配置为“云彩姬”的核心组件,请配置后重启项目!") sys.exit() # 当需要调用的接口涉及到driver操作时抛出 if call_driver and not os.path.exists(CHROMEDRIVER_PATH): logger.error(chromedriver_not_found_error) sys.exit() def run(self): try: if [cq for cq in reversed(self.root) if not os.path.exists(cq)]: logger.warning('系统文件残缺!') logger.debug("启动<工程重构>模块...") self.set_up_file_tree(self.root) self.check_config() finally: if self.flag: logger.success(">>> 运行环境链接完成,请重启项目") logger.warning(">>> 提醒您正确配置Chrome及对应版本的ChromeDriver") sys.exit() _ConfigQuarantine().run() class _ScaffoldGuider: # __slots__ = 
list(command_set.keys()) def __init__(self): # 脚手架公开接口 self.scaffold_ruler = [i for i in self.__dir__() if i.startswith('_scaffold_')] self.command2solution = { 'deploy': self._scaffold_deploy, 'decouple': self._scaffold_decouple, 'overdue': self._scaffold_overdue, 'spawn': self._scaffold_spawn, # 'run': self._scaffold_run, # 'force_run': self._scaffold_force_run, 'remain': self._scaffold_remain, 'ping': self._scaffold_ping, 'panel': self._scaffold_panel, 'entropy': self._scaffold_entropy, 'ash': self._scaffold_ash, 'mining': self._scaffold_mining, } def startup(self, driver_command_set: List[str]): """ 仅支持单进程使用 @param driver_command_set: 在空指令时列表仅有1个元素,表示启动路径 @return: """ # logger.info(f">>> {' '.join(driver_command_set)}") # ------------------------------- # TODO 优先级0:预处理指令集 # ------------------------------- # CommandId or List[CommandId] driver_command: List[str] = [] # 未输入任何指令 列出脚手架简介 if len(driver_command_set) == 1: print("\n".join([f">>> {menu[0].ljust(20, '-')}|| {menu[-1]}" for menu in command_set.items()])) return True # 输入立即指令 转译指令 if len(driver_command_set) == 2: driver_command = [driver_command_set[-1].lower(), ] # 输入指令集 转译指令集 elif len(driver_command_set) > 2: driver_command = list({command.lower() for command in driver_command_set[1:]}) # 捕获意料之外的情况 if not isinstance(driver_command, list): return True # ------------------------------- # TODO 优先级1:解析运行参数 # ------------------------------- # TODO --help 帮助菜单(继续完善相关功能) # 使用该参数时系统不解析运行指令 if '--help' in driver_command: logger.info(">>>GuiderHelp || 帮助菜单") driver_command.remove("--help") for command_ in driver_command: introduction = command_set.get(command_) if introduction: print(f"> {command_.ljust(20, '-')}|| {introduction}") else: print(f"> {command_}指令不存在") return True # 智能采集 解析目标 if '--parse' in driver_command: driver_command.remove('--parse') task_list = [] for url_ in reversed(driver_command): if url_.startswith("http") or url_.startswith("ssr") or url_.startswith("vmess"): 
task_list.append(gevent.spawn(self._scaffold_parse, url=url_)) gevent.joinall(task_list) return True # 清除系统缓存 if 'clear' in driver_command: driver_command.remove('clear') self._scaffold_clear() return True # ------------------------------- # TODO 优先级2:运行单线程指令 # ------------------------------- # 协程任务队列 task_list = [] # 测试数据库连接 while driver_command.__len__() > 0: _pending_command = driver_command.pop() try: task_list.append(gevent.spawn(self.command2solution[_pending_command])) except KeyError as e: logger.warning(f'脚手架暂未授权指令<{_pending_command}> {e}') # 并发执行以上指令 gevent.joinall(task_list) # ------------------------------- # TODO 优先级3:自定义参数部署(阻塞线程) # ------------------------------- if 'deploy' in driver_command: self._scaffold_deploy() @staticmethod def _scaffold_deploy(): # logger.info("<ScaffoldGuider> Deploy || MainProcess") from src.BusinessCentralLayer.middleware.interface_io import SystemInterface SystemInterface.run(deploy_=True) @staticmethod def _scaffold_clear(): _permission = { "logs": input(terminal_echo("是否清除所有运行日志[y]?", 2)), "cache": input(terminal_echo("是否清除所有运行缓存[y]?", 2)) } # 清除日志 ~/database/logs if os.path.exists(SERVER_DIR_DATABASE_LOG) and _permission['logs'].startswith("y"): history_logs = os.listdir(SERVER_DIR_DATABASE_LOG) for _log_file in history_logs: if len(_log_file.split('.')) > 2: _log_path = os.path.join(SERVER_DIR_DATABASE_LOG, _log_file) os.remove(_log_path) terminal_echo(f"清除运行日志-->{_log_path}", 3) # 清除运行缓存 ~/database/ if _permission['cache'].startswith("y"): cache_blocks = { # ~/database/temp_cache/ SERVER_DIR_DATABASE_CACHE, # ~/database/staff_hosts/ SERVER_DIR_SSPANEL_MINING, } for block in cache_blocks: # 扫描文件 if os.path.exists(block): _files = [os.path.join(block, i) for i in os.listdir(block)] # 清除文件 for _file in _files: if os.path.isfile(_file): os.remove(_file) else: shutil.rmtree(_file) os.mkdir(_file) terminal_echo(f"清除运行缓存-->{_file}", 3) terminal_echo("系统缓存文件清理完毕", 1) @staticmethod def _scaffold_decouple(): 
logger.info("<ScaffoldGuider> Decouple || General startup") from src.BusinessLogicLayer.plugins.accelerator import SubscribesCleaner SubscribesCleaner(debug=True).interface(power=DEFAULT_POWER) @staticmethod def _scaffold_overdue(): logger.info("<ScaffoldGuider> Overdue || Redis DDT") from src.BusinessCentralLayer.middleware.interface_io import SystemInterface SystemInterface.ddt() @staticmethod def _scaffold_spawn(): _ConfigQuarantine.check_config(call_driver=True) logger.info("<ScaffoldGuider> Spawn || MainCollector") from src.BusinessLogicLayer.cluster.slavers import __entropy__ from src.BusinessLogicLayer.plugins.accelerator import booster booster(docker=__entropy__, silence=True, power=DEFAULT_POWER, assault=True) @staticmethod def _scaffold_run(): _ConfigQuarantine.check_config(call_driver=True) logger.info("<ScaffoldGuider> Run || MainCollector") from src.BusinessCentralLayer.middleware.interface_io import SystemInterface SystemInterface.run(deploy_=False) @staticmethod def _scaffold_force_run(): _ConfigQuarantine.check_config(call_driver=True) logger.info("<ScaffoldGuider> ForceRun || MainCollector") from src.BusinessLogicLayer.plugins.accelerator import ForceRunRelease ForceRunRelease(task_docker=CRAWLER_SEQUENCE).interface() @staticmethod def _scaffold_remain(): from src.BusinessCentralLayer.middleware.subscribe_io import select_subs_to_admin tracer = [f"{tag[0]}\n采集类型:{info_[0]}\n存活数量:{tag[-1]}" for info_ in select_subs_to_admin(select_netloc=None, _debug=False)['info'].items() for tag in info_[-1].items()] for i, tag in enumerate(tracer): print(f">>> [{i + 1}/{tracer.__len__()}]{tag}") @staticmethod def _scaffold_ping(): from src.BusinessCentralLayer.middleware.redis_io import RedisClient logger.info(f"<ScaffoldGuider> Ping || {RedisClient().test()}") @staticmethod def _scaffold_parse(url, _unused_mode: str = "subscribe"): logger.info(f">>> PARSE --> {url}") from src.BusinessLogicLayer.plugins.accelerator import cleaner # 检查路径完整性 if not 
os.path.exists(SERVER_DIR_DATABASE_CACHE): os.mkdir(SERVER_DIR_DATABASE_CACHE) # 调取API解析链接 result = cleaner.subs2node(url) if result and isinstance(result, dict): _, info, nodes = result.values() # 节点数量 减去无效的注释项 _unused_node_num = nodes.__len__() - 2 if nodes.__len__() - 2 >= 0 else 0 token_ = '' if info.get('token') is None else info.get('token') # 缓存数据 cache_sub2node = os.path.join(SERVER_DIR_DATABASE_CACHE, f'sub2node_{token_}.txt') with open(cache_sub2node, 'w', encoding="utf8") as f: for node in nodes: f.write(f"{node}\n") # 自动打开缓存文件,仅在parse一个链接时启用 # os.startfile(cache_sub2node) cleaner.node2detail(nodes[0]) else: return False @staticmethod def _scaffold_panel(): from src.BusinessCentralLayer.middleware.interface_io import SystemInterface SystemInterface.system_panel() @staticmethod def _scaffold_entropy(_debug=False): from src.BusinessLogicLayer.cluster.slavers import __entropy__ for i, host_ in enumerate(__entropy__): print(f">>> [{i + 1}/{__entropy__.__len__()}]{host_['name']}") print(f"注册链接: {host_['register_url']}") print(f"存活周期: {host_['life_cycle']}天") print(f"采集类型: {'&'.join([f'{j[0].lower()}' for j in host_['hyper_params'].items() if j[-1]])}\n") @staticmethod def _scaffold_exile(task_sequential=4): logger.debug(f"<ScaffoldGuider> Exile[0/{task_sequential}] || Running scaffold exile...") time.sleep(0.3) # task1: 检查队列任务 logger.debug(f"<ScaffoldGuider> Exile[1/{task_sequential}] || Checking the task queue...") time.sleep(0.3) _ScaffoldGuider._scaffold_entropy(_debug=True) # logger.success(f">>> [Mission Completed] || entropy") # task2: decouple logger.debug(f"<ScaffoldGuider> Exile[2/{task_sequential}] || Cleaning the subscribe pool...") time.sleep(0.3) _ScaffoldGuider._scaffold_decouple() # logger.success(f">>> [Mission Completed] || decouple") # task3: overdue logger.debug(f"<ScaffoldGuider> Exile[3/{task_sequential}] || Cleaning timed out subscribes...") time.sleep(0.3) _ScaffoldGuider._scaffold_overdue() # logger.success(">>> [Mission Completed] || 
overdue") # finally: print task-queue, remaining subscribes logger.debug(f"<ScaffoldGuider> Exile[{task_sequential}/{task_sequential}] || Outputting debug data...") _ScaffoldGuider._scaffold_entropy() _ScaffoldGuider._scaffold_remain() logger.success("<ScaffoldGuider> Exile[Mission Completed] || exile") @staticmethod @logger.catch() def _scaffold_ash(): """ 无尽套娃 """ from src.BusinessLogicLayer.apis import scaffold_api logger.info("<ScaffoldGuider> ash | Clash订阅堆一键生成脚本") # -------------------------------------------------- # 参数清洗 # -------------------------------------------------- if 'win' not in sys.platform: return # -------------------------------------------------- # 运行脚本 # -------------------------------------------------- return scaffold_api.ash(debug=True, decouple=True) @staticmethod def _scaffold_mining(): """ “国外”服务器:直接运行 大陆主机:开启代理后运行 :return: """ from src.BusinessLogicLayer.apis.staff_mining import staff_api use_collector = staff_api.is_first_run() classify_dir, staff_info = staff_api.go( debug=False, silence=True, power=os.cpu_count() * 2, identity_recaptcha=False, use_collector=use_collector, use_checker=True, use_generator=False, ) staff_api.refresh_cache(mode='de-dup') print(f"\n\nSTAFF INFO\n{'_' * 32}") for element in staff_info.items(): for i, tag in enumerate(element[-1]): print(f">>> [{i + 1}/{len(element[-1])}]{element[0]}: {tag}") print(f">>> 文件导出目录: {classify_dir}") scaffold = _ScaffoldGuider()
__all__ = ['scaffold', 'command_set'] from gevent import monkey monkey.patch_all() import csv import os import sys import time import shutil from typing import List import gevent from src.BusinessCentralLayer.setting import logger, DEFAULT_POWER, CHROMEDRIVER_PATH, \ REDIS_MASTER, SERVER_DIR_DATABASE_CACHE, SERVER_DIR_CLIENT_DEPORT, SERVER_PATH_DEPOT_VCS, SERVER_DIR_CACHE_BGPIC, \ REDIS_SLAVER_DDT, CRAWLER_SEQUENCE, terminal_echo, SERVER_DIR_DATABASE_LOG, SERVER_DIR_SSPANEL_MINING command_set = { # --------------------------------------------- # 部署接口 # --------------------------------------------- 'deploy': "部署项目(定时任务/Flask 开启与否取决于yaml配置文件)", # --------------------------------------------- # 调试接口 # --------------------------------------------- "clear": "清理系统运行缓存", "decouple": "立即唤醒一次subs_ddt链接解耦任务", "overdue": "立即执行一次过时链接清洗任务", "run": "[请使用spawn命令替代]立即执行一次采集任务(强制使用协程加速)", "force_run": "[请使用spawn命令替代]强制执行采集任务", "remain": "读取剩余订阅数量", "ping": "测试数据库连接", "entropy": "打印采集队列", "exile": "执行队列运维脚本(高饱和强阻塞任务)", "spawn": "并发执行所有在列的采集任务", "mining": "启动一次针对STAFF host的SEO全站挖掘任务", # --------------------------------------------- # 随参调试接口 # --------------------------------------------- # usage: 解析某条订阅链接 python main.py --parse https://domain/link/token?sub=3 # usage: 解析多条订阅链接 python main.py --parse https://domain/link/token?sub=3 https://domain/link/token2?sub=3 # "--parse": """解析链接。若是订阅链接,则检测节点数量并测试ping延时""", # --------------------------------------------- # Windows 功能接口 # --------------------------------------------- "panel": "[for Windows] 打开桌面前端面板", "ash": "[for Windows] 一键清洗订阅池,并将所有类型订阅转换为Clash yaml配置文件," "借由URL Scheme自动打开Clash并下载配置文件", # --------------------------------------------- # 调用示例 # --------------------------------------------- "example": "python main.py ping" } class _ConfigQuarantine: def __init__(self): self.root = [ SERVER_DIR_CLIENT_DEPORT, SERVER_PATH_DEPOT_VCS, SERVER_DIR_DATABASE_CACHE, SERVER_DIR_CACHE_BGPIC ] self.flag = False def set_up_file_tree(self, 
root): """ --/qinse/V2RaycSpider{verNum} --BCL --BLL --BVL --Database --client_depot --vcs.csv --logs --*error.log --*runtime.log --temp_cache --*AnyTempCacheFile... --*CrawlFetchHistory.txt --fake_useragent_0.1.11.json --*tests """ # 检查默认下载地址是否残缺 深度优先初始化系统文件 for child_ in root: if not os.path.exists(child_): self.flag = True try: # 初始化文件夹 if os.path.isdir(child_) or not os.path.splitext(child_)[-1]: os.mkdir(child_) logger.success(f"系统文件链接成功->{child_}") # 初始化文件 else: if child_ == SERVER_PATH_DEPOT_VCS: try: with open(child_, 'w', encoding='utf-8', newline='') as fpx: csv.writer(fpx).writerow(['version', 'title']) logger.success(f"系统文件链接成功->{child_}") except Exception as ep: logger.exception(f"Exception{child_}{ep}") except Exception as ep: logger.exception(ep) @staticmethod def check_config(call_driver: bool = False): chromedriver_not_found_error = "<ScaffoldGuider> ForceRun || ChromedriverNotFound ||" \ "未查找到chromedriver驱动,请根据技术文档正确配置\n" \ ">>> https://github.com/QIN2DIM/V2RayCloudSpider" # if not all(SMTP_ACCOUNT.values()): # logger.warning('您未正确配置<通信邮箱>信息(SMTP_ACCOUNT)') # if not SERVERCHAN_SCKEY: # logger.warning("您未正确配置<Server酱>的SCKEY") if not all([REDIS_SLAVER_DDT.get("host"), REDIS_SLAVER_DDT.get("password")]): logger.warning('您未正确配置<Redis-Slave> 本项目资源拷贝功能无法使用,但不影响系统正常运行。') if not all([REDIS_MASTER.get("host"), REDIS_MASTER.get("password")]): logger.error("您未正确配置<Redis-Master> 此配置为“云彩姬”的核心组件,请配置后重启项目!") sys.exit() # 当需要调用的接口涉及到driver操作时抛出 if call_driver and not os.path.exists(CHROMEDRIVER_PATH): logger.error(chromedriver_not_found_error) sys.exit() def run(self): try: if [cq for cq in reversed(self.root) if not os.path.exists(cq)]: logger.warning('系统文件残缺!') logger.debug("启动<工程重构>模块...") self.set_up_file_tree(self.root) self.check_config() finally: if self.flag: logger.success(">>> 运行环境链接完成,请重启项目") logger.warning(">>> 提醒您正确配置Chrome及对应版本的ChromeDriver") sys.exit() _ConfigQuarantine().run() class _ScaffoldGuider: # __slots__ = list(command_set.keys()) def 
__init__(self): # 脚手架公开接口 self.scaffold_ruler = [i for i in self.__dir__() if i.startswith('_scaffold_')] self.command2solution = { 'deploy': self._scaffold_deploy, 'decouple': self._scaffold_decouple, 'overdue': self._scaffold_overdue, 'spawn': self._scaffold_spawn, # 'run': self._scaffold_run, # 'force_run': self._scaffold_force_run, 'remain': self._scaffold_remain, 'ping': self._scaffold_ping, 'panel': self._scaffold_panel, 'entropy': self._scaffold_entropy, 'ash': self._scaffold_ash, 'mining': self._scaffold_mining, } def startup(self, driver_command_set: List[str]): """ 仅支持单进程使用 @param driver_command_set: 在空指令时列表仅有1个元素,表示启动路径 @return: """ # logger.info(f">>> {' '.join(driver_command_set)}") # ------------------------------- # TODO 优先级0:预处理指令集 # ------------------------------- # CommandId or List[CommandId] driver_command: List[str] = [] # 未输入任何指令 列出脚手架简介 if len(driver_command_set) == 1: print("\n".join([f">>> {menu[0].ljust(20, '-')}|| {menu[-1]}" for menu in command_set.items()])) return True # 输入立即指令 转译指令 if len(driver_command_set) == 2: driver_command = [driver_command_set[-1].lower(), ] # 输入指令集 转译指令集 elif len(driver_command_set) > 2: driver_command = list({command.lower() for command in driver_command_set[1:]}) # 捕获意料之外的情况 if not isinstance(driver_command, list): return True # ------------------------------- # TODO 优先级1:解析运行参数 # ------------------------------- # TODO --help 帮助菜单(继续完善相关功能) # 使用该参数时系统不解析运行指令 if '--help' in driver_command: logger.info(">>>GuiderHelp || 帮助菜单") driver_command.remove("--help") for command_ in driver_command: introduction = command_set.get(command_) if introduction: print(f"> {command_.ljust(20, '-')}|| {introduction}") else: print(f"> {command_}指令不存在") return True # 智能采集 解析目标 if '--parse' in driver_command: driver_command.remove('--parse') task_list = [] for url_ in reversed(driver_command): if url_.startswith("http") or url_.startswith("ssr") or url_.startswith("vmess"): task_list.append(gevent.spawn(self._scaffold_parse, 
url=url_)) gevent.joinall(task_list) return True # 清除系统缓存 if 'clear' in driver_command: driver_command.remove('clear') self._scaffold_clear() return True # ------------------------------- # TODO 优先级2:运行单线程指令 # ------------------------------- # 协程任务队列 task_list = [] # 测试数据库连接 while driver_command.__len__() > 0: _pending_command = driver_command.pop() try: task_list.append(gevent.spawn(self.command2solution[_pending_command])) except KeyError as e: logger.warning(f'脚手架暂未授权指令<{_pending_command}> {e}') # 并发执行以上指令 gevent.joinall(task_list) # ------------------------------- # TODO 优先级3:自定义参数部署(阻塞线程) # ------------------------------- if 'deploy' in driver_command: self._scaffold_deploy() @staticmethod def _scaffold_deploy(): # logger.info("<ScaffoldGuider> Deploy || MainProcess") from src.BusinessCentralLayer.middleware.interface_io import SystemInterface SystemInterface.run(deploy_=True) @staticmethod def _scaffold_clear(): _permission = { "logs": input(terminal_echo("是否清除所有运行日志[y]?", 2)), "cache": input(terminal_echo("是否清除所有运行缓存[y]?", 2)) } # 清除日志 ~/database/logs if os.path.exists(SERVER_DIR_DATABASE_LOG) and _permission['logs'].startswith("y"): history_logs = os.listdir(SERVER_DIR_DATABASE_LOG) for _log_file in history_logs: if len(_log_file.split('.')) > 2: _log_path = os.path.join(SERVER_DIR_DATABASE_LOG, _log_file) os.remove(_log_path) terminal_echo(f"清除运行日志-->{_log_path}", 3) # 清除运行缓存 ~/database/ if _permission['cache'].startswith("y"): cache_blocks = { # ~/database/temp_cache/ SERVER_DIR_DATABASE_CACHE, # ~/database/staff_hosts/ SERVER_DIR_SSPANEL_MINING, } for block in cache_blocks: # 扫描文件 if os.path.exists(block): _files = [os.path.join(block, i) for i in os.listdir(block)] # 清除文件 for _file in _files: if os.path.isfile(_file): os.remove(_file) else: shutil.rmtree(_file) os.mkdir(_file) terminal_echo(f"清除运行缓存-->{_file}", 3) terminal_echo("系统缓存文件清理完毕", 1) @staticmethod def _scaffold_decouple(): logger.info("<ScaffoldGuider> Decouple || General startup") from 
src.BusinessLogicLayer.plugins.accelerator import SubscribesCleaner SubscribesCleaner(debug=True).interface(power=DEFAULT_POWER) @staticmethod def _scaffold_overdue(): logger.info("<ScaffoldGuider> Overdue || Redis DDT") from src.BusinessCentralLayer.middleware.interface_io import SystemInterface SystemInterface.ddt() @staticmethod def _scaffold_spawn(): _ConfigQuarantine.check_config(call_driver=True) logger.info("<ScaffoldGuider> Spawn || MainCollector") from src.BusinessLogicLayer.cluster.slavers import __entropy__ from src.BusinessLogicLayer.plugins.accelerator import booster booster(docker=__entropy__, silence=True, power=DEFAULT_POWER, assault=True) @staticmethod def _scaffold_run(): _ConfigQuarantine.check_config(call_driver=True) logger.info("<ScaffoldGuider> Run || MainCollector") from src.BusinessCentralLayer.middleware.interface_io import SystemInterface SystemInterface.run(deploy_=False) @staticmethod def _scaffold_force_run(): _ConfigQuarantine.check_config(call_driver=True) logger.info("<ScaffoldGuider> ForceRun || MainCollector") from src.BusinessLogicLayer.plugins.accelerator import ForceRunRelease ForceRunRelease(task_docker=CRAWLER_SEQUENCE).interface() @staticmethod def _scaffold_remain(): from src.BusinessCentralLayer.middleware.subscribe_io import select_subs_to_admin tracer = [f"{tag[0]}\n采集类型:{info_[0]}\n存活数量:{tag[-1]}" for info_ in select_subs_to_admin(select_netloc=None, _debug=False)['info'].items() for tag in info_[-1].items()] for i, tag in enumerate(tracer): print(f">>> [{i + 1}/{tracer.__len__()}]{tag}") @staticmethod def _scaffold_ping(): from src.BusinessCentralLayer.middleware.redis_io import RedisClient logger.info(f"<ScaffoldGuider> Ping || {RedisClient().test()}") @staticmethod def _scaffold_parse(url, _unused_mode: str = "subscribe"): logger.info(f">>> PARSE --> {url}") from src.BusinessLogicLayer.plugins.accelerator import cleaner # 检查路径完整性 if not os.path.exists(SERVER_DIR_DATABASE_CACHE): os.mkdir(SERVER_DIR_DATABASE_CACHE) # 
调取API解析链接 result = cleaner.subs2node(url) if result and isinstance(result, dict): _, info, nodes = result.values() # 节点数量 减去无效的注释项 _unused_node_num = nodes.__len__() - 2 if nodes.__len__() - 2 >= 0 else 0 token_ = '' if info.get('token') is None else info.get('token') # 缓存数据 cache_sub2node = os.path.join(SERVER_DIR_DATABASE_CACHE, f'sub2node_{token_}.txt') with open(cache_sub2node, 'w', encoding="utf8") as f: for node in nodes: f.write(f"{node}\n") # 自动打开缓存文件,仅在parse一个链接时启用 # os.startfile(cache_sub2node) cleaner.node2detail(nodes[0]) else: return False @staticmethod def _scaffold_panel(): from src.BusinessCentralLayer.middleware.interface_io import SystemInterface SystemInterface.system_panel() @staticmethod def _scaffold_entropy(_debug=False): from src.BusinessLogicLayer.cluster.slavers import __entropy__ for i, host_ in enumerate(__entropy__): print(f">>> [{i + 1}/{__entropy__.__len__()}]{host_['name']}") print(f"注册链接: {host_['register_url']}") print(f"存活周期: {host_['life_cycle']}天") print(f"采集类型: {'&'.join([f'{j[0].lower()}' for j in host_['hyper_params'].items() if j[-1]])}\n") @staticmethod def _scaffold_exile(task_sequential=4): logger.debug(f"<ScaffoldGuider> Exile[0/{task_sequential}] || Running scaffold exile...") time.sleep(0.3) # task1: 检查队列任务 logger.debug(f"<ScaffoldGuider> Exile[1/{task_sequential}] || Checking the task queue...") time.sleep(0.3) _ScaffoldGuider._scaffold_entropy(_debug=True) # logger.success(f">>> [Mission Completed] || entropy") # task2: decouple logger.debug(f"<ScaffoldGuider> Exile[2/{task_sequential}] || Cleaning the subscribe pool...") time.sleep(0.3) _ScaffoldGuider._scaffold_decouple() # logger.success(f">>> [Mission Completed] || decouple") # task3: overdue logger.debug(f"<ScaffoldGuider> Exile[3/{task_sequential}] || Cleaning timed out subscribes...") time.sleep(0.3) _ScaffoldGuider._scaffold_overdue() # logger.success(">>> [Mission Completed] || overdue") # finally: print task-queue, remaining subscribes 
logger.debug(f"<ScaffoldGuider> Exile[{task_sequential}/{task_sequential}] || Outputting debug data...") _ScaffoldGuider._scaffold_entropy() _ScaffoldGuider._scaffold_remain() logger.success("<ScaffoldGuider> Exile[Mission Completed] || exile") @staticmethod @logger.catch() def _scaffold_ash(): """ 无尽套娃 """ from src.BusinessLogicLayer.apis import scaffold_api logger.info("<ScaffoldGuider> ash | Clash订阅堆一键生成脚本") # -------------------------------------------------- # 参数清洗 # -------------------------------------------------- if 'win' not in sys.platform: return # -------------------------------------------------- # 运行脚本 # -------------------------------------------------- return scaffold_api.ash(debug=True, decouple=True) @staticmethod def _scaffold_mining(): """ “国外”服务器:直接运行 大陆主机:开启代理后运行 :return: """ from src.BusinessLogicLayer.apis.staff_mining import staff_api use_collector = staff_api.is_first_run() classify_dir, staff_info = staff_api.go( debug=False, silence=True, power=os.cpu_count() * 2, identity_recaptcha=False, use_collector=use_collector, use_checker=True, use_generator=False, ) staff_api.refresh_cache(mode='de-dup') print(f"\n\nSTAFF INFO\n{'_' * 32}") for element in staff_info.items(): for i, tag in enumerate(element[-1]): print(f">>> [{i + 1}/{len(element[-1])}]{element[0]}: {tag}") print(f">>> 文件导出目录: {classify_dir}") scaffold = _ScaffoldGuider()
zh
0.323723
# --------------------------------------------- # 部署接口 # --------------------------------------------- # --------------------------------------------- # 调试接口 # --------------------------------------------- # --------------------------------------------- # 随参调试接口 # --------------------------------------------- # usage: 解析某条订阅链接 python main.py --parse https://domain/link/token?sub=3 # usage: 解析多条订阅链接 python main.py --parse https://domain/link/token?sub=3 https://domain/link/token2?sub=3 # "--parse": """解析链接。若是订阅链接,则检测节点数量并测试ping延时""", # --------------------------------------------- # Windows 功能接口 # --------------------------------------------- # --------------------------------------------- # 调用示例 # --------------------------------------------- --/qinse/V2RaycSpider{verNum} --BCL --BLL --BVL --Database --client_depot --vcs.csv --logs --*error.log --*runtime.log --temp_cache --*AnyTempCacheFile... --*CrawlFetchHistory.txt --fake_useragent_0.1.11.json --*tests # 检查默认下载地址是否残缺 深度优先初始化系统文件 # 初始化文件夹 # 初始化文件 # if not all(SMTP_ACCOUNT.values()): # logger.warning('您未正确配置<通信邮箱>信息(SMTP_ACCOUNT)') # if not SERVERCHAN_SCKEY: # logger.warning("您未正确配置<Server酱>的SCKEY") # 当需要调用的接口涉及到driver操作时抛出 # __slots__ = list(command_set.keys()) # 脚手架公开接口 # 'run': self._scaffold_run, # 'force_run': self._scaffold_force_run, 仅支持单进程使用 @param driver_command_set: 在空指令时列表仅有1个元素,表示启动路径 @return: # logger.info(f">>> {' '.join(driver_command_set)}") # ------------------------------- # TODO 优先级0:预处理指令集 # ------------------------------- # CommandId or List[CommandId] # 未输入任何指令 列出脚手架简介 # 输入立即指令 转译指令 # 输入指令集 转译指令集 # 捕获意料之外的情况 # ------------------------------- # TODO 优先级1:解析运行参数 # ------------------------------- # TODO --help 帮助菜单(继续完善相关功能) # 使用该参数时系统不解析运行指令 # 智能采集 解析目标 # 清除系统缓存 # ------------------------------- # TODO 优先级2:运行单线程指令 # ------------------------------- # 协程任务队列 # 测试数据库连接 # 并发执行以上指令 # ------------------------------- # TODO 优先级3:自定义参数部署(阻塞线程) # ------------------------------- # 
logger.info("<ScaffoldGuider> Deploy || MainProcess") # 清除日志 ~/database/logs # 清除运行缓存 ~/database/ # ~/database/temp_cache/ # ~/database/staff_hosts/ # 扫描文件 # 清除文件 # 检查路径完整性 # 调取API解析链接 # 节点数量 减去无效的注释项 # 缓存数据 # 自动打开缓存文件,仅在parse一个链接时启用 # os.startfile(cache_sub2node) # task1: 检查队列任务 # logger.success(f">>> [Mission Completed] || entropy") # task2: decouple # logger.success(f">>> [Mission Completed] || decouple") # task3: overdue # logger.success(">>> [Mission Completed] || overdue") # finally: print task-queue, remaining subscribes 无尽套娃 # -------------------------------------------------- # 参数清洗 # -------------------------------------------------- # -------------------------------------------------- # 运行脚本 # -------------------------------------------------- “国外”服务器:直接运行 大陆主机:开启代理后运行 :return:
1.553839
2
python/swap_header.py
daniestevez/gr-csp
19
9892
<reponame>daniestevez/gr-csp<gh_stars>10-100 #!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2016 <NAME> <<EMAIL>>. # # This is free and unencumbered software released into the public domain. # # Anyone is free to copy, modify, publish, use, compile, sell, or # distribute this software, either in source code form or as a compiled # binary, for any purpose, commercial or non-commercial, and by any # means. # # In jurisdictions that recognize copyright laws, the author or authors # of this software dedicate any and all copyright interest in the # software to the public domain. We make this dedication for the benefit # of the public at large and to the detriment of our heirs and # successors. We intend this dedication to be an overt act of # relinquishment in perpetuity of all present and future rights to this # software under copyright law. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. # # For more information, please refer to <http://unlicense.org> # import numpy from gnuradio import gr import pmt import array class swap_header(gr.basic_block): """ docstring for block swap_header """ def __init__(self): gr.basic_block.__init__(self, name="swap_crc", in_sig=[], out_sig=[]) self.message_port_register_in(pmt.intern('in')) self.set_msg_handler(pmt.intern('in'), self.handle_msg) self.message_port_register_out(pmt.intern('out')) def handle_msg(self, msg_pmt): msg = pmt.cdr(msg_pmt) if not pmt.is_u8vector(msg): print "[ERROR] Received invalid message type. 
Expected u8vector" return packet = array.array("B", pmt.u8vector_elements(msg)) header = packet[:4] header.reverse() packet = header + packet[4:] msg_pmt = pmt.cons(pmt.PMT_NIL, pmt.init_u8vector(len(packet), bytearray(packet))) self.message_port_pub(pmt.intern('out'), msg_pmt)
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2016 <NAME> <<EMAIL>>. # # This is free and unencumbered software released into the public domain. # # Anyone is free to copy, modify, publish, use, compile, sell, or # distribute this software, either in source code form or as a compiled # binary, for any purpose, commercial or non-commercial, and by any # means. # # In jurisdictions that recognize copyright laws, the author or authors # of this software dedicate any and all copyright interest in the # software to the public domain. We make this dedication for the benefit # of the public at large and to the detriment of our heirs and # successors. We intend this dedication to be an overt act of # relinquishment in perpetuity of all present and future rights to this # software under copyright law. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. # # For more information, please refer to <http://unlicense.org> # import numpy from gnuradio import gr import pmt import array class swap_header(gr.basic_block): """ docstring for block swap_header """ def __init__(self): gr.basic_block.__init__(self, name="swap_crc", in_sig=[], out_sig=[]) self.message_port_register_in(pmt.intern('in')) self.set_msg_handler(pmt.intern('in'), self.handle_msg) self.message_port_register_out(pmt.intern('out')) def handle_msg(self, msg_pmt): msg = pmt.cdr(msg_pmt) if not pmt.is_u8vector(msg): print "[ERROR] Received invalid message type. 
Expected u8vector" return packet = array.array("B", pmt.u8vector_elements(msg)) header = packet[:4] header.reverse() packet = header + packet[4:] msg_pmt = pmt.cons(pmt.PMT_NIL, pmt.init_u8vector(len(packet), bytearray(packet))) self.message_port_pub(pmt.intern('out'), msg_pmt)
en
0.800403
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2016 <NAME> <<EMAIL>>. # # This is free and unencumbered software released into the public domain. # # Anyone is free to copy, modify, publish, use, compile, sell, or # distribute this software, either in source code form or as a compiled # binary, for any purpose, commercial or non-commercial, and by any # means. # # In jurisdictions that recognize copyright laws, the author or authors # of this software dedicate any and all copyright interest in the # software to the public domain. We make this dedication for the benefit # of the public at large and to the detriment of our heirs and # successors. We intend this dedication to be an overt act of # relinquishment in perpetuity of all present and future rights to this # software under copyright law. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. # # For more information, please refer to <http://unlicense.org> # docstring for block swap_header
1.677587
2
start.py
gleenn/dfplayer
0
9893
<gh_stars>0 #!/usr/bin/python # # Start dfplayer. import argparse import os import shutil import subprocess import sys import time _PROJ_DIR = os.path.dirname(__file__) def main(): os.chdir(_PROJ_DIR) os.environ['LD_LIBRARY_PATH'] = '/lib:/usr/lib:/usr/local/lib' arg_parser = argparse.ArgumentParser(description='Start player') arg_parser.add_argument('--gdb', action='store_true') arg_parser.add_argument('--no-reset', action='store_true') arg_parser.add_argument('--disable-net', action='store_true') arg_parser.add_argument('--mpd', action='store_true') arg_parser.add_argument('--disable-fin', action='store_true') arg_parser.add_argument('--max', action='store_true') arg_parser.add_argument('--no-sound', action='store_true') arg_parser.add_argument('--no-sound-config', action='store_true') arg_parser.add_argument('--prod', action='store_true') arg_parser.add_argument('--enable-kinect', action='store_true') args = arg_parser.parse_args() if args.prod: print 'dfplayer is sleeping for 30 seconds before startup' time.sleep(30) if not args.no_sound_config and not args.no_sound: shutil.copyfile( 'dfplayer/asoundrc.sample', '/home/' + os.getlogin() + '/.asoundrc') params = ['env/bin/dfplayer', '--listen=0.0.0.0:8081'] if args.no_reset: params.append('--no-reset') if args.no_sound: params.append('--no-sound') if args.disable_net: params.append('--disable-net') if args.disable_fin: params.append('--disable-fin') if args.enable_kinect or args.prod: params.append('--enable-kinect') if args.mpd: params.append('--mpd') if args.max or args.prod: params.append('--max') try: if args.gdb: subprocess.check_call( ['gdb', '-ex', 'run', '--args', 'env/bin/python'] + params) #['gdb', '--args', 'env/bin/python'] + params) else: subprocess.check_call(params) except KeyboardInterrupt: print 'Player is exiting via KeyboardInterrupt' except Exception, err: print sys.exc_info()[0] if args.prod: print 'dfplayer has exited and start.py script is now sleeping' time.sleep(3600) main()
#!/usr/bin/python # # Start dfplayer. import argparse import os import shutil import subprocess import sys import time _PROJ_DIR = os.path.dirname(__file__) def main(): os.chdir(_PROJ_DIR) os.environ['LD_LIBRARY_PATH'] = '/lib:/usr/lib:/usr/local/lib' arg_parser = argparse.ArgumentParser(description='Start player') arg_parser.add_argument('--gdb', action='store_true') arg_parser.add_argument('--no-reset', action='store_true') arg_parser.add_argument('--disable-net', action='store_true') arg_parser.add_argument('--mpd', action='store_true') arg_parser.add_argument('--disable-fin', action='store_true') arg_parser.add_argument('--max', action='store_true') arg_parser.add_argument('--no-sound', action='store_true') arg_parser.add_argument('--no-sound-config', action='store_true') arg_parser.add_argument('--prod', action='store_true') arg_parser.add_argument('--enable-kinect', action='store_true') args = arg_parser.parse_args() if args.prod: print 'dfplayer is sleeping for 30 seconds before startup' time.sleep(30) if not args.no_sound_config and not args.no_sound: shutil.copyfile( 'dfplayer/asoundrc.sample', '/home/' + os.getlogin() + '/.asoundrc') params = ['env/bin/dfplayer', '--listen=0.0.0.0:8081'] if args.no_reset: params.append('--no-reset') if args.no_sound: params.append('--no-sound') if args.disable_net: params.append('--disable-net') if args.disable_fin: params.append('--disable-fin') if args.enable_kinect or args.prod: params.append('--enable-kinect') if args.mpd: params.append('--mpd') if args.max or args.prod: params.append('--max') try: if args.gdb: subprocess.check_call( ['gdb', '-ex', 'run', '--args', 'env/bin/python'] + params) #['gdb', '--args', 'env/bin/python'] + params) else: subprocess.check_call(params) except KeyboardInterrupt: print 'Player is exiting via KeyboardInterrupt' except Exception, err: print sys.exc_info()[0] if args.prod: print 'dfplayer has exited and start.py script is now sleeping' time.sleep(3600) main()
en
0.116066
#!/usr/bin/python # # Start dfplayer. #['gdb', '--args', 'env/bin/python'] + params)
2.114605
2
Game_Mechanics.py
Finnder/Console-Based-Story-Game
0
9894
<gh_stars>0 def attack(): pass def defend(): pass def pass_turn(): pass def use_ability_One(kit): pass def use_ability_Two(kit): pass def end_Of_Battle(): pass
def attack(): pass def defend(): pass def pass_turn(): pass def use_ability_One(kit): pass def use_ability_Two(kit): pass def end_Of_Battle(): pass
none
1
1.345189
1
AIY/voice/cloudspeech_demo.py
Pougnator/Prometheus
0
9895
#!/usr/bin/env python3 # Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A demo of the Google CloudSpeech recognizer.""" import aiy.audio import aiy.cloudspeech import aiy.voicehat import aiy.i18n import aiy.audio CONFIRM_SOUND_PATH = '/home/pi/Music/R2D2/R2_Understood.wav' CONFUSED_SOUND_PATH = '/home/pi/Music/R2D2/R2_Confused.wav' UNRECOGNISED_SOUND_PATH = '/home/pi/Music/R2D2/R2_FastBip.wav' def main(): status_ui = aiy.voicehat.get_status_ui() status_ui.status('starting') aiy.i18n.set_language_code("fr-FR") recognizer = aiy.cloudspeech.get_recognizer() recognizer.expect_phrase('allumer le feu') recognizer.expect_phrase('éteindre') recognizer.expect_phrase('clignotter') recognizer.expect_phrase('cuir') recognizer.expect_phrase('R2') button = aiy.voicehat.get_button() led = aiy.voicehat.get_led() aiy.audio.get_recorder().start() while True: status_ui.status('ready') print('Press the button and speak') button.wait_for_press() aiy.voicehat.get_status_ui().set_trigger_sound_wave('/home/pi/Music/R2D2/hotword.wav') status_ui.status('listening') WaitingForHotword = True while WaitingForHotword == True: print('Say the hotword to start') hotword = recognizer.recognize() if not hotword: print('I recognised nothing ... 
looping') else: if ('R2') in hotword: WaitingForHotword = False print('Playing a test sound...') aiy.audio.play_wave(CONFIRM_SOUND_PATH) print('Listening...') text = recognizer.recognize() if not text: print('Sorry, I did not hear you.') aiy.audio.play_wave(CONFUSED_SOUND_PATH) else: WaitingForHotword = True print('You said "', text, '"') if 'allumer le feu' in text: led.set_state(aiy.voicehat.LED.ON) elif 'éteindre' in text: led.set_state(aiy.voicehat.LED.OFF) elif 'clignotter' in text: led.set_state(aiy.voicehat.LED.BLINK) elif 'cuir' in text: # led.set_state(aiy.voicehat.LED.BLINK) aiy.audio.say('cuir cuir cuir moustache') elif 'goodbye' in text: break else: aiy.audio.play_wave(UNRECOGNISED_SOUND_PATH) else: print('Hotword not detected .... looping') if __name__ == '__main__': main()
#!/usr/bin/env python3 # Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A demo of the Google CloudSpeech recognizer.""" import aiy.audio import aiy.cloudspeech import aiy.voicehat import aiy.i18n import aiy.audio CONFIRM_SOUND_PATH = '/home/pi/Music/R2D2/R2_Understood.wav' CONFUSED_SOUND_PATH = '/home/pi/Music/R2D2/R2_Confused.wav' UNRECOGNISED_SOUND_PATH = '/home/pi/Music/R2D2/R2_FastBip.wav' def main(): status_ui = aiy.voicehat.get_status_ui() status_ui.status('starting') aiy.i18n.set_language_code("fr-FR") recognizer = aiy.cloudspeech.get_recognizer() recognizer.expect_phrase('allumer le feu') recognizer.expect_phrase('éteindre') recognizer.expect_phrase('clignotter') recognizer.expect_phrase('cuir') recognizer.expect_phrase('R2') button = aiy.voicehat.get_button() led = aiy.voicehat.get_led() aiy.audio.get_recorder().start() while True: status_ui.status('ready') print('Press the button and speak') button.wait_for_press() aiy.voicehat.get_status_ui().set_trigger_sound_wave('/home/pi/Music/R2D2/hotword.wav') status_ui.status('listening') WaitingForHotword = True while WaitingForHotword == True: print('Say the hotword to start') hotword = recognizer.recognize() if not hotword: print('I recognised nothing ... 
looping') else: if ('R2') in hotword: WaitingForHotword = False print('Playing a test sound...') aiy.audio.play_wave(CONFIRM_SOUND_PATH) print('Listening...') text = recognizer.recognize() if not text: print('Sorry, I did not hear you.') aiy.audio.play_wave(CONFUSED_SOUND_PATH) else: WaitingForHotword = True print('You said "', text, '"') if 'allumer le feu' in text: led.set_state(aiy.voicehat.LED.ON) elif 'éteindre' in text: led.set_state(aiy.voicehat.LED.OFF) elif 'clignotter' in text: led.set_state(aiy.voicehat.LED.BLINK) elif 'cuir' in text: # led.set_state(aiy.voicehat.LED.BLINK) aiy.audio.say('cuir cuir cuir moustache') elif 'goodbye' in text: break else: aiy.audio.play_wave(UNRECOGNISED_SOUND_PATH) else: print('Hotword not detected .... looping') if __name__ == '__main__': main()
en
0.793099
#!/usr/bin/env python3 # Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. A demo of the Google CloudSpeech recognizer. # led.set_state(aiy.voicehat.LED.BLINK)
2.343847
2
options/base_option.py
lime-j/YTMT-Strategy-1
26
9896
<reponame>lime-j/YTMT-Strategy-1 import argparse import models model_names = sorted(name for name in models.__dict__ if name.islower() and not name.startswith("__") and callable(models.__dict__[name])) class BaseOptions(): def __init__(self): self.parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) self.initialized = False def initialize(self): # experiment specifics self.parser.add_argument('--name', type=str, default=None, help='name of the experiment. It decides where to store samples and models') self.parser.add_argument('--gpu_ids', type=str, default='0', help='gpu ids: e.g. 0 0,1,2, 0,2. use -1 for CPU') self.parser.add_argument('--model', type=str, default='errnet_model', help='chooses which model to use.') self.parser.add_argument('--checkpoints_dir', type=str, default='./checkpoints', help='models are saved here') self.parser.add_argument('--resume', '-r', action='store_true', help='resume from checkpoint') self.parser.add_argument('--resume_epoch', '-re', type=int, default=None, help='checkpoint to use. (default: latest') self.parser.add_argument('--seed', type=int, default=2018, help='random seed to use. Default=2018') self.parser.add_argument('--supp_eval', action='store_true', help='supplementary evaluation') self.parser.add_argument('--start_now', action='store_true', help='supplementary evaluation') self.parser.add_argument('--testr', action='store_true', help='test for reflections') self.parser.add_argument('--select', type=str, default=None) # for setting input self.parser.add_argument('--serial_batches', action='store_true', help='if true, takes images in order to make batches, otherwise takes them randomly') self.parser.add_argument('--nThreads', default=8, type=int, help='# threads for loading data') self.parser.add_argument('--max_dataset_size', type=int, default=None, help='Maximum number of samples allowed per dataset. 
If the dataset directory contains more than max_dataset_size, only a subset is loaded.') # for display self.parser.add_argument('--no-log', action='store_true', help='disable tf logger?') self.parser.add_argument('--no-verbose', action='store_true', help='disable verbose info?') self.parser.add_argument('--display_winsize', type=int, default=256, help='display window size') self.parser.add_argument('--display_port', type=int, default=8097, help='visdom port of the web display') self.parser.add_argument('--display_id', type=int, default=0, help='window id of the web display (use 0 to disable visdom)') self.parser.add_argument('--display_single_pane_ncols', type=int, default=0, help='if positive, display all images in a single visdom web panel with certain number of images per row.') self.initialized = True
import argparse import models model_names = sorted(name for name in models.__dict__ if name.islower() and not name.startswith("__") and callable(models.__dict__[name])) class BaseOptions(): def __init__(self): self.parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) self.initialized = False def initialize(self): # experiment specifics self.parser.add_argument('--name', type=str, default=None, help='name of the experiment. It decides where to store samples and models') self.parser.add_argument('--gpu_ids', type=str, default='0', help='gpu ids: e.g. 0 0,1,2, 0,2. use -1 for CPU') self.parser.add_argument('--model', type=str, default='errnet_model', help='chooses which model to use.') self.parser.add_argument('--checkpoints_dir', type=str, default='./checkpoints', help='models are saved here') self.parser.add_argument('--resume', '-r', action='store_true', help='resume from checkpoint') self.parser.add_argument('--resume_epoch', '-re', type=int, default=None, help='checkpoint to use. (default: latest') self.parser.add_argument('--seed', type=int, default=2018, help='random seed to use. Default=2018') self.parser.add_argument('--supp_eval', action='store_true', help='supplementary evaluation') self.parser.add_argument('--start_now', action='store_true', help='supplementary evaluation') self.parser.add_argument('--testr', action='store_true', help='test for reflections') self.parser.add_argument('--select', type=str, default=None) # for setting input self.parser.add_argument('--serial_batches', action='store_true', help='if true, takes images in order to make batches, otherwise takes them randomly') self.parser.add_argument('--nThreads', default=8, type=int, help='# threads for loading data') self.parser.add_argument('--max_dataset_size', type=int, default=None, help='Maximum number of samples allowed per dataset. 
If the dataset directory contains more than max_dataset_size, only a subset is loaded.') # for display self.parser.add_argument('--no-log', action='store_true', help='disable tf logger?') self.parser.add_argument('--no-verbose', action='store_true', help='disable verbose info?') self.parser.add_argument('--display_winsize', type=int, default=256, help='display window size') self.parser.add_argument('--display_port', type=int, default=8097, help='visdom port of the web display') self.parser.add_argument('--display_id', type=int, default=0, help='window id of the web display (use 0 to disable visdom)') self.parser.add_argument('--display_single_pane_ncols', type=int, default=0, help='if positive, display all images in a single visdom web panel with certain number of images per row.') self.initialized = True
en
0.477176
# experiment specifics # for setting input # for display
2.54937
3
bookstore/__init__.py
JanhaviSoni/Book-Recommendation-Analysis
23
9897
from flask import Flask, Response from flask_basicauth import BasicAuth from flask_cors import CORS, cross_origin import os #from flask_admin import Admin,AdminIndexView #from flask_admin.contrib.sqla import ModelView from flask_sqlalchemy import SQLAlchemy as _BaseSQLAlchemy from flask_migrate import Migrate, MigrateCommand from flask_script import Manager from werkzeug.exceptions import HTTPException from flask_login import LoginManager from itsdangerous import URLSafeSerializer # import psycopg2 # import pymysql # import logging # import warnings # warnings.filterwarnings("ignore") # Initializing Flask App app = Flask(__name__) app.secret_key="Vampire" # This video demonstrates why we use CORS in our Flask App - https://www.youtube.com/watch?v=vWl5XcvQBx0 CORS(app) app.config.from_object("config.DevelopmentConfig") class SQLAlchemy(_BaseSQLAlchemy): """ This class is defined so that we can set "pool_pre_ping" to True. pool_pre_ping is a boolean flag, which when set to True, will enable the connection pool 'pre-ping' feature that tests connections for liveness upon each checkout. This prevents from dropping of database connection with our app. 
This class inherits the original SQLAlchemy class, and nothing else is changed except pool_pre_ping flag https://docs.sqlalchemy.org/en/13/core/pooling.html#dealing-with-disconnects https://github.com/pallets/flask-sqlalchemy/issues/589 """ def apply_pool_defaults(self, app, options): super(SQLAlchemy, self).apply_pool_defaults(app, options) options["pool_pre_ping"] = True # Creating and Initializing db object of SQLAlchemy class db = SQLAlchemy(app) db.init_app(app) migrate = Migrate(app, db, render_as_batch=True) with app.app_context(): if db.engine.url.drivername == 'sqlite': migrate.init_app(app, db, render_as_batch=True) else: migrate.init_app(app, db) manager = Manager(app) manager.add_command('db', MigrateCommand) # Creating serializer object of URLSafeSerializer class for serializing session_token serializer = URLSafeSerializer(app.secret_key) # Here we set session_token as our user_loader. from bookstore.client.views import client from bookstore.admin.views import admin app.register_blueprint(client) app.register_blueprint(admin)
from flask import Flask, Response from flask_basicauth import BasicAuth from flask_cors import CORS, cross_origin import os #from flask_admin import Admin,AdminIndexView #from flask_admin.contrib.sqla import ModelView from flask_sqlalchemy import SQLAlchemy as _BaseSQLAlchemy from flask_migrate import Migrate, MigrateCommand from flask_script import Manager from werkzeug.exceptions import HTTPException from flask_login import LoginManager from itsdangerous import URLSafeSerializer # import psycopg2 # import pymysql # import logging # import warnings # warnings.filterwarnings("ignore") # Initializing Flask App app = Flask(__name__) app.secret_key="Vampire" # This video demonstrates why we use CORS in our Flask App - https://www.youtube.com/watch?v=vWl5XcvQBx0 CORS(app) app.config.from_object("config.DevelopmentConfig") class SQLAlchemy(_BaseSQLAlchemy): """ This class is defined so that we can set "pool_pre_ping" to True. pool_pre_ping is a boolean flag, which when set to True, will enable the connection pool 'pre-ping' feature that tests connections for liveness upon each checkout. This prevents from dropping of database connection with our app. 
This class inherits the original SQLAlchemy class, and nothing else is changed except pool_pre_ping flag https://docs.sqlalchemy.org/en/13/core/pooling.html#dealing-with-disconnects https://github.com/pallets/flask-sqlalchemy/issues/589 """ def apply_pool_defaults(self, app, options): super(SQLAlchemy, self).apply_pool_defaults(app, options) options["pool_pre_ping"] = True # Creating and Initializing db object of SQLAlchemy class db = SQLAlchemy(app) db.init_app(app) migrate = Migrate(app, db, render_as_batch=True) with app.app_context(): if db.engine.url.drivername == 'sqlite': migrate.init_app(app, db, render_as_batch=True) else: migrate.init_app(app, db) manager = Manager(app) manager.add_command('db', MigrateCommand) # Creating serializer object of URLSafeSerializer class for serializing session_token serializer = URLSafeSerializer(app.secret_key) # Here we set session_token as our user_loader. from bookstore.client.views import client from bookstore.admin.views import admin app.register_blueprint(client) app.register_blueprint(admin)
en
0.679402
#from flask_admin import Admin,AdminIndexView #from flask_admin.contrib.sqla import ModelView # import psycopg2 # import pymysql # import logging # import warnings # warnings.filterwarnings("ignore") # Initializing Flask App # This video demonstrates why we use CORS in our Flask App - https://www.youtube.com/watch?v=vWl5XcvQBx0 This class is defined so that we can set "pool_pre_ping" to True. pool_pre_ping is a boolean flag, which when set to True, will enable the connection pool 'pre-ping' feature that tests connections for liveness upon each checkout. This prevents from dropping of database connection with our app. This class inherits the original SQLAlchemy class, and nothing else is changed except pool_pre_ping flag https://docs.sqlalchemy.org/en/13/core/pooling.html#dealing-with-disconnects https://github.com/pallets/flask-sqlalchemy/issues/589 # Creating and Initializing db object of SQLAlchemy class # Creating serializer object of URLSafeSerializer class for serializing session_token # Here we set session_token as our user_loader.
2.505414
3
cobl/lexicon/management/commands/stats236.py
Bibiko/CoBL-public
0
9898
<reponame>Bibiko/CoBL-public<filename>cobl/lexicon/management/commands/stats236.py # -*- coding: utf-8 -*- from __future__ import unicode_literals from django.core.management import BaseCommand from cobl.lexicon.models import LanguageList, \ MeaningList, \ Meaning, \ Lexeme, \ CognateClass, \ CognateJudgement, \ LanguageClade, \ Clade class Command(BaseCommand): help = "Computes statistics for https://github.com/lingdb/CoBL/issues/236"\ "\nPossible parameters are: {1, 2, 3} for task number." def add_arguments(self, parser): parser.add_argument('task', type=int) missing_args_message = "Please provide a task number of {1,2,3}." def handle(self, *args, **options): # Data to work with: current = LanguageList.objects.get(name='Current') jena200 = MeaningList.objects.get(name='Jena200') languageIds = set(current.languages.values_list('id', flat=True)) meaningIds = jena200.meanings.values_list('id', flat=True) lexemeIds = Lexeme.objects.filter( language_id__in=languageIds, meaning_id__in=meaningIds).values_list('id', flat=True) cognateClassIds = CognateJudgement.objects.filter( lexeme_id__in=lexemeIds).values_list( 'cognate_class_id', flat=True) cognateClasses = CognateClass.objects.filter( id__in=cognateClassIds, root_form='').all() # Only without root_form is wanted. 
if options['task'] == 1: self.stdout.write('Task 1') self.report(self.compute(2, cognateClasses, meaningIds, languageIds), meaningIds) elif options['task'] == 2: self.stdout.write('Task 2') task1 = self.compute(2, cognateClasses, meaningIds, languageIds) task1CCIds = set([c.id for c in task1 if c is not None]) self.report([c for c in self.compute( 1, cognateClasses, meaningIds, languageIds) if c is not None and c.id not in task1CCIds], meaningIds) elif options['task'] == 3: self.stdout.write('Task 3') unwantedCognateClassIds = set( [c.id for c in self.compute(1, cognateClasses, meaningIds, languageIds) if c is not None]) cIdcladeMap = {c.id: c for c in Clade.objects.exclude( cladeLevel0=0).all()} # Computing ._cognateClasses for each clade: for _, clade in cIdcladeMap.items(): inCladeLanguageIds = set(LanguageClade.objects.filter( clade=clade).values_list('language_id', flat=True)) lexemes = Lexeme.objects.filter( language_id__in=languageIds & inCladeLanguageIds, meaning_id__in=meaningIds, not_swadesh_term=False).all() cognateClassIds = set(CognateJudgement.objects.filter( lexeme__in=lexemes).values_list( 'cognate_class_id', flat=True)) clade._cognateClassIds = set(CognateClass.objects.filter( id__in=cognateClassIds - unwantedCognateClassIds, root_form='').order_by('id').values_list('id', flat=True)) # Removing cognate class IDs we don't want: for _, clade in cIdcladeMap.items(): cogIdCounts = {cId: 0 for cId in clade._cognateClassIds} childIds = clade.queryChildren().values_list('id', flat=True) for childId in childIds: child = cIdcladeMap[childId] for cId in child._cognateClassIds: if cId in cogIdCounts: cogIdCounts[cId] += 1 # Setting ._cognateClassIds for current clade: clade._cognateClassIds = set([cId for cId, count in cogIdCounts.items() if count != 1]) # Updating children: for childId in childIds: child = cIdcladeMap[childId] child._cognateClassIds = child._cognateClassIds & \ set([cId for cId, count in cogIdCounts.items() if count == 1]) # Creating .txt 
files: for _, clade in cIdcladeMap.items(): # Grouping by meaning: meaningMarkdowns = {} for c in clade._cognateClassIds: s = '- [ ] cog. class '\ '[%s](http://cobl.info/cognate/%s/)' % (c, c) meanings = Meaning.objects.filter( lexeme__cognate_class=c, lexeme__language_id__in=languageIds, lexeme__not_swadesh_term=False, id__in=meaningIds).distinct().all() s += ''.join([ ' = meaning [%s](http://cobl.info/meaning/%s/)' % (m.gloss, m.gloss) for m in meanings]) for m in meanings: if m.gloss not in meaningMarkdowns: meaningMarkdowns[m.gloss] = [] meaningMarkdowns[m.gloss].append(s) # Composing markdown: markdown = [] for k in sorted(meaningMarkdowns.keys()): markdown += meaningMarkdowns[k] # Writing if content: if len(markdown) > 0: fname = '/tmp/%s.txt' % clade.taxonsetName self.stdout.write("Writing file '%s'." % fname) with open(fname, 'w') as f: f.write("\n".join(markdown)+"\n") def compute(self, lowerBranchBound, cognateClasses, meaningIds, languageIds): # The computation we want to perform twice for cognateClass in cognateClasses: lexemeIds = CognateJudgement.objects.filter( cognate_class_id=cognateClass.id).values_list( 'lexeme_id', flat=True) # Need to investigate lexemes: cladeNamesSet = set() for lexeme in Lexeme.objects.filter( id__in=lexemeIds, language_id__in=languageIds, meaning_id__in=meaningIds).all(): # Need to investigate clades: clades = Clade.objects.filter( id__in=LanguageClade.objects.filter( language_id=lexeme.language_id, language_id__in=languageIds).values_list( 'clade_id', flat=True), cladeLevel1=0).exclude( cladeLevel0=0 # Ignore PIE ).all() if len(clades) > 0: cladeNamesSet.add(', '.join([ c.cladeName for c in clades])) # Yield interesting clades: if len(cladeNamesSet) > lowerBranchBound: cognateClass.bNames = ', '.join('"%s"' % n for n in cladeNamesSet) yield(cognateClass) yield(None) # EOG def report(self, cognateClasses, meaningIds): # Print given cognateClasses: for cognateClass in cognateClasses: if cognateClass is None: continue 
lexemeIds = CognateJudgement.objects.filter( cognate_class_id=cognateClass.id).values_list( 'lexeme_id', flat=True) meaningNames = Meaning.objects.filter( lexeme__id__in=lexemeIds, id__in=meaningIds).distinct().values_list('gloss', flat=True) meaningNames = ', '.join(['"%s"' % m for m in meaningNames]) self.stdout.write("Cognate set id: %s " "meanings: %s branches: %s" % (cognateClass.id, meaningNames, cognateClass.bNames))
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.core.management import BaseCommand from cobl.lexicon.models import LanguageList, \ MeaningList, \ Meaning, \ Lexeme, \ CognateClass, \ CognateJudgement, \ LanguageClade, \ Clade class Command(BaseCommand): help = "Computes statistics for https://github.com/lingdb/CoBL/issues/236"\ "\nPossible parameters are: {1, 2, 3} for task number." def add_arguments(self, parser): parser.add_argument('task', type=int) missing_args_message = "Please provide a task number of {1,2,3}." def handle(self, *args, **options): # Data to work with: current = LanguageList.objects.get(name='Current') jena200 = MeaningList.objects.get(name='Jena200') languageIds = set(current.languages.values_list('id', flat=True)) meaningIds = jena200.meanings.values_list('id', flat=True) lexemeIds = Lexeme.objects.filter( language_id__in=languageIds, meaning_id__in=meaningIds).values_list('id', flat=True) cognateClassIds = CognateJudgement.objects.filter( lexeme_id__in=lexemeIds).values_list( 'cognate_class_id', flat=True) cognateClasses = CognateClass.objects.filter( id__in=cognateClassIds, root_form='').all() # Only without root_form is wanted. 
if options['task'] == 1: self.stdout.write('Task 1') self.report(self.compute(2, cognateClasses, meaningIds, languageIds), meaningIds) elif options['task'] == 2: self.stdout.write('Task 2') task1 = self.compute(2, cognateClasses, meaningIds, languageIds) task1CCIds = set([c.id for c in task1 if c is not None]) self.report([c for c in self.compute( 1, cognateClasses, meaningIds, languageIds) if c is not None and c.id not in task1CCIds], meaningIds) elif options['task'] == 3: self.stdout.write('Task 3') unwantedCognateClassIds = set( [c.id for c in self.compute(1, cognateClasses, meaningIds, languageIds) if c is not None]) cIdcladeMap = {c.id: c for c in Clade.objects.exclude( cladeLevel0=0).all()} # Computing ._cognateClasses for each clade: for _, clade in cIdcladeMap.items(): inCladeLanguageIds = set(LanguageClade.objects.filter( clade=clade).values_list('language_id', flat=True)) lexemes = Lexeme.objects.filter( language_id__in=languageIds & inCladeLanguageIds, meaning_id__in=meaningIds, not_swadesh_term=False).all() cognateClassIds = set(CognateJudgement.objects.filter( lexeme__in=lexemes).values_list( 'cognate_class_id', flat=True)) clade._cognateClassIds = set(CognateClass.objects.filter( id__in=cognateClassIds - unwantedCognateClassIds, root_form='').order_by('id').values_list('id', flat=True)) # Removing cognate class IDs we don't want: for _, clade in cIdcladeMap.items(): cogIdCounts = {cId: 0 for cId in clade._cognateClassIds} childIds = clade.queryChildren().values_list('id', flat=True) for childId in childIds: child = cIdcladeMap[childId] for cId in child._cognateClassIds: if cId in cogIdCounts: cogIdCounts[cId] += 1 # Setting ._cognateClassIds for current clade: clade._cognateClassIds = set([cId for cId, count in cogIdCounts.items() if count != 1]) # Updating children: for childId in childIds: child = cIdcladeMap[childId] child._cognateClassIds = child._cognateClassIds & \ set([cId for cId, count in cogIdCounts.items() if count == 1]) # Creating .txt 
files: for _, clade in cIdcladeMap.items(): # Grouping by meaning: meaningMarkdowns = {} for c in clade._cognateClassIds: s = '- [ ] cog. class '\ '[%s](http://cobl.info/cognate/%s/)' % (c, c) meanings = Meaning.objects.filter( lexeme__cognate_class=c, lexeme__language_id__in=languageIds, lexeme__not_swadesh_term=False, id__in=meaningIds).distinct().all() s += ''.join([ ' = meaning [%s](http://cobl.info/meaning/%s/)' % (m.gloss, m.gloss) for m in meanings]) for m in meanings: if m.gloss not in meaningMarkdowns: meaningMarkdowns[m.gloss] = [] meaningMarkdowns[m.gloss].append(s) # Composing markdown: markdown = [] for k in sorted(meaningMarkdowns.keys()): markdown += meaningMarkdowns[k] # Writing if content: if len(markdown) > 0: fname = '/tmp/%s.txt' % clade.taxonsetName self.stdout.write("Writing file '%s'." % fname) with open(fname, 'w') as f: f.write("\n".join(markdown)+"\n") def compute(self, lowerBranchBound, cognateClasses, meaningIds, languageIds): # The computation we want to perform twice for cognateClass in cognateClasses: lexemeIds = CognateJudgement.objects.filter( cognate_class_id=cognateClass.id).values_list( 'lexeme_id', flat=True) # Need to investigate lexemes: cladeNamesSet = set() for lexeme in Lexeme.objects.filter( id__in=lexemeIds, language_id__in=languageIds, meaning_id__in=meaningIds).all(): # Need to investigate clades: clades = Clade.objects.filter( id__in=LanguageClade.objects.filter( language_id=lexeme.language_id, language_id__in=languageIds).values_list( 'clade_id', flat=True), cladeLevel1=0).exclude( cladeLevel0=0 # Ignore PIE ).all() if len(clades) > 0: cladeNamesSet.add(', '.join([ c.cladeName for c in clades])) # Yield interesting clades: if len(cladeNamesSet) > lowerBranchBound: cognateClass.bNames = ', '.join('"%s"' % n for n in cladeNamesSet) yield(cognateClass) yield(None) # EOG def report(self, cognateClasses, meaningIds): # Print given cognateClasses: for cognateClass in cognateClasses: if cognateClass is None: continue 
lexemeIds = CognateJudgement.objects.filter( cognate_class_id=cognateClass.id).values_list( 'lexeme_id', flat=True) meaningNames = Meaning.objects.filter( lexeme__id__in=lexemeIds, id__in=meaningIds).distinct().values_list('gloss', flat=True) meaningNames = ', '.join(['"%s"' % m for m in meaningNames]) self.stdout.write("Cognate set id: %s " "meanings: %s branches: %s" % (cognateClass.id, meaningNames, cognateClass.bNames))
en
0.786243
# -*- coding: utf-8 -*- # Data to work with: # Only without root_form is wanted. # Computing ._cognateClasses for each clade: # Removing cognate class IDs we don't want: # Setting ._cognateClassIds for current clade: # Updating children: # Creating .txt files: # Grouping by meaning: # Composing markdown: # Writing if content: # The computation we want to perform twice # Need to investigate lexemes: # Need to investigate clades: # Ignore PIE # Yield interesting clades: # EOG # Print given cognateClasses:
2.026529
2
collation/test2.py
enabling-languages/dinka
1
9899
<filename>collation/test2.py<gh_stars>1-10 import pandas as pd from icu import Collator, Locale, RuleBasedCollator ddf = pd.read_csv("../word_frequency/unilex/din.txt", sep='\t', skiprows = range(2,5)) collator = Collator.createInstance(Locale('en_AU.UTF-8')) # https://stackoverflow.com/questions/13838405/custom-sorting-in-pandas-dataframe/27009771#27009771 # https://gist.github.com/seanpue/e1cb846f676194ae77eb def sort_pd(key=None,reverse=False): def sorter(series): series_list = list(series) return [series_list.index(i) for i in sorted(series_list,key=key,reverse=reverse)] return sorter sort_by_custom_dict = sort_pd(key=collator.getSortKey) #ddf.iloc[sort_by_custom_dict(ddf.index)] # ddf.iloc[sort_by_custom_dict(ddf['Form'])] ddf.iloc[sort_by_custom_dict(ddf['Form'])] #https://python3.wannaphong.com/2015/03/sort-python.html # https://pyerror.com/detail/1316/ lexemes = ddf.Form #lexemes2 = ddf['Form'] temp = lexemes.sort_values() collation_rules = "&A<<aa<<<aA<<<Aa<<<AA<<ä<<<Ä<<ää<<<äÄ<<<Ää<<<ÄÄ\n&D<dh<<<dH<<<Dh<<<DH\n&E<<ee<<<eE<<<Ee<<<EE<<ë<<<Ë<<ëë<<<ëË<<<Ëë<<<ËË<ɛ<<<Ɛ<<ɛɛ<<<ɛƐ<<<Ɛɛ<<<ƐƐ<<ɛ̈<<<Ɛ̈<<ɛ̈ɛ̈<<<ɛ̈Ɛ̈<<<Ɛ̈ɛ̈<<<Ɛ̈Ɛ̈\n&G<ɣ<<<Ɣ\n&I<<ii<<<iI<<<Ii<<<II<<ï<<<Ï<<ïï<<<ïÏ<<<Ïï<<<ÏÏ\n&N<nh<<<nH<<<Nh<<<NH<ny<<<nY<<<Ny<<<NH<ŋ<<<Ŋ\n&O<<oo<<<oO<<<Oo<<<OO<<ö<<<Ö<<öö<<<öÖ<<<Öö<<<ÖÖ<ɔ<<<Ɔ<<ɔɔ<<<ɔƆ<<<Ɔɔ<<<ƆƆ<<ɔ̈<<<Ɔ̈<<ɔ̈ɔ̈<<<ɔ̈Ɔ̈<<<Ɔ̈ɔ̈<<<Ɔ̈Ɔ̈\n&T<th<<<tH<<<Th<<<TH\n&U<<uu<<<uU<<<Uu<<<UU" custom_collator = RuleBasedCollator(collation_rules) temp.sort_values(key=lambda x: custom_collator.getSortKey(x) ) def sort_pd(key=None,reverse=False): def sorter(series): series_list = list(series) return [series_list.index(i) for i in sorted(series_list,key=key,reverse=reverse)] return sorter sort_by_custom_dict = sort_pd(key=custom_collator.getSortKey)
<filename>collation/test2.py<gh_stars>1-10 import pandas as pd from icu import Collator, Locale, RuleBasedCollator ddf = pd.read_csv("../word_frequency/unilex/din.txt", sep='\t', skiprows = range(2,5)) collator = Collator.createInstance(Locale('en_AU.UTF-8')) # https://stackoverflow.com/questions/13838405/custom-sorting-in-pandas-dataframe/27009771#27009771 # https://gist.github.com/seanpue/e1cb846f676194ae77eb def sort_pd(key=None,reverse=False): def sorter(series): series_list = list(series) return [series_list.index(i) for i in sorted(series_list,key=key,reverse=reverse)] return sorter sort_by_custom_dict = sort_pd(key=collator.getSortKey) #ddf.iloc[sort_by_custom_dict(ddf.index)] # ddf.iloc[sort_by_custom_dict(ddf['Form'])] ddf.iloc[sort_by_custom_dict(ddf['Form'])] #https://python3.wannaphong.com/2015/03/sort-python.html # https://pyerror.com/detail/1316/ lexemes = ddf.Form #lexemes2 = ddf['Form'] temp = lexemes.sort_values() collation_rules = "&A<<aa<<<aA<<<Aa<<<AA<<ä<<<Ä<<ää<<<äÄ<<<Ää<<<ÄÄ\n&D<dh<<<dH<<<Dh<<<DH\n&E<<ee<<<eE<<<Ee<<<EE<<ë<<<Ë<<ëë<<<ëË<<<Ëë<<<ËË<ɛ<<<Ɛ<<ɛɛ<<<ɛƐ<<<Ɛɛ<<<ƐƐ<<ɛ̈<<<Ɛ̈<<ɛ̈ɛ̈<<<ɛ̈Ɛ̈<<<Ɛ̈ɛ̈<<<Ɛ̈Ɛ̈\n&G<ɣ<<<Ɣ\n&I<<ii<<<iI<<<Ii<<<II<<ï<<<Ï<<ïï<<<ïÏ<<<Ïï<<<ÏÏ\n&N<nh<<<nH<<<Nh<<<NH<ny<<<nY<<<Ny<<<NH<ŋ<<<Ŋ\n&O<<oo<<<oO<<<Oo<<<OO<<ö<<<Ö<<öö<<<öÖ<<<Öö<<<ÖÖ<ɔ<<<Ɔ<<ɔɔ<<<ɔƆ<<<Ɔɔ<<<ƆƆ<<ɔ̈<<<Ɔ̈<<ɔ̈ɔ̈<<<ɔ̈Ɔ̈<<<Ɔ̈ɔ̈<<<Ɔ̈Ɔ̈\n&T<th<<<tH<<<Th<<<TH\n&U<<uu<<<uU<<<Uu<<<UU" custom_collator = RuleBasedCollator(collation_rules) temp.sort_values(key=lambda x: custom_collator.getSortKey(x) ) def sort_pd(key=None,reverse=False): def sorter(series): series_list = list(series) return [series_list.index(i) for i in sorted(series_list,key=key,reverse=reverse)] return sorter sort_by_custom_dict = sort_pd(key=custom_collator.getSortKey)
en
0.524828
# https://stackoverflow.com/questions/13838405/custom-sorting-in-pandas-dataframe/27009771#27009771 # https://gist.github.com/seanpue/e1cb846f676194ae77eb #ddf.iloc[sort_by_custom_dict(ddf.index)] # ddf.iloc[sort_by_custom_dict(ddf['Form'])] #https://python3.wannaphong.com/2015/03/sort-python.html # https://pyerror.com/detail/1316/ #lexemes2 = ddf['Form']
2.895085
3