aboutsummaryrefslogtreecommitdiffstats
path: root/jython-tosca-parser/src/main/resources/Lib/site-packages/requests-2.10.0-py2.7.egg/requests/packages/chardet/sjisprober.py
blob: cd0e9e7078b38741e9e610d7b9a92a78369a3eb9 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import sys
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import SJISDistributionAnalysis
from .jpcntx import SJISContextAnalysis
from .mbcssm import SJISSMModel
from . import constants


class SJISProber(MultiByteCharSetProber):
    """Shift_JIS charset prober.

    Combines a byte-level coding state machine with two statistical
    analyzers (2-byte context and character distribution) to decide
    whether a byte stream is Shift_JIS/CP932.
    """

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(SJISSMModel)
        self._mDistributionAnalyzer = SJISDistributionAnalysis()
        self._mContextAnalyzer = SJISContextAnalysis()
        self.reset()

    def reset(self):
        """Reset base-class state and the context analyzer for a new run."""
        MultiByteCharSetProber.reset(self)
        self._mContextAnalyzer.reset()

    def get_charset_name(self):
        # The context analyzer reports the concrete name (e.g. CP932 vs
        # SHIFT_JIS) based on what it has seen.
        return self._mContextAnalyzer.get_charset_name()

    def feed(self, aBuf):
        """Feed a chunk of bytes into the prober.

        Returns the prober state (eDetecting / eFoundIt / eNotMe) after
        consuming the chunk.
        """
        aLen = len(aBuf)
        if aLen == 0:
            # Fix: the unguarded original raised IndexError on an empty
            # buffer at ``aBuf[aLen - 1]`` below. Nothing to analyze.
            return self.get_state()
        for i in range(0, aLen):
            codingState = self._mCodingSM.next_state(aBuf[i])
            if codingState == constants.eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte ' + str(i)
                                     + '\n')
                self._mState = constants.eNotMe
                break
            elif codingState == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == constants.eStart:
                charLen = self._mCodingSM.get_current_charlen()
                if i == 0:
                    # First byte of this chunk may complete a character
                    # begun in the previous chunk; stitch the two bytes
                    # together via the carried-over _mLastChar buffer.
                    self._mLastChar[1] = aBuf[0]
                    self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:],
                                                charLen)
                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
                else:
                    self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3
                                                     - charLen], charLen)
                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
                                                     charLen)

        # Remember the final byte in case the next chunk starts
        # mid-character (handled by the i == 0 branch above).
        self._mLastChar[0] = aBuf[aLen - 1]

        if self.get_state() == constants.eDetecting:
            # Shortcut: once the context analyzer has enough data and the
            # confidence is high, stop probing early.
            if (self._mContextAnalyzer.got_enough_data() and
               (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        """Return the higher of the context and distribution confidences."""
        contxtCf = self._mContextAnalyzer.get_confidence()
        distribCf = self._mDistributionAnalyzer.get_confidence()
        return max(contxtCf, distribCf)
('type') node_type = req.get('node') value = req if node_type: keyword = 'node' else: # If value is a dict and has a type key # we need to lookup the node type using # the capability type value = req if isinstance(value, dict): captype = value['capability'] value = (self. _get_node_type_by_cap(key, captype)) relation = self._get_relation(key, value) keyword = key node_type = value rtype = RelationshipType(relation, keyword, self.custom_def) relatednode = NodeType(node_type, self.custom_def) relationship[rtype] = relatednode return relationship def _get_node_type_by_cap(self, key, cap): '''Find the node type that has the provided capability This method will lookup all node types if they have the provided capability. ''' # Filter the node types node_types = [node_type for node_type in self.TOSCA_DEF.keys() if node_type.startswith(self.NODE_PREFIX) and node_type != 'tosca.nodes.Root'] for node_type in node_types: node_def = self.TOSCA_DEF[node_type] if isinstance(node_def, dict) and 'capabilities' in node_def: node_caps = node_def['capabilities'] for value in node_caps.values(): if isinstance(value, dict) and \ 'type' in value and value['type'] == cap: return node_type def _get_relation(self, key, ndtype): relation = None ntype = NodeType(ndtype) caps = ntype.get_capabilities() if caps and key in caps.keys(): c = caps[key] for r in self.RELATIONSHIP_TYPE: rtypedef = ntype.TOSCA_DEF[r] for properties in rtypedef.values(): if c.type in properties: relation = r break if relation: break else: for properties in rtypedef.values(): if c.parent_type in properties: relation = r break return relation def get_capabilities_objects(self): '''Return a list of capability objects.''' typecapabilities = [] caps = self.get_value(self.CAPABILITIES, None, True) if caps: # 'name' is symbolic name of the capability # 'value' is a dict { 'type': <capability type name> } for name, value in caps.items(): ctype = value.get('type') cap = CapabilityTypeDef(name, ctype, self.type, 
self.custom_def) typecapabilities.append(cap) return typecapabilities def get_capabilities(self): '''Return a dictionary of capability name-objects pairs.''' return {cap.name: cap for cap in self.get_capabilities_objects()} @property def requirements(self): return self.get_value(self.REQUIREMENTS, None, True) def get_all_requirements(self): return self.requirements @property def interfaces(self): return self.get_value(self.INTERFACES) @property def lifecycle_inputs(self): '''Return inputs to life cycle operations if found.''' inputs = [] interfaces = self.interfaces if interfaces: for name, value in interfaces.items(): if name == ifaces.LIFECYCLE: for x, y in value.items(): if x == 'inputs': for i in y.iterkeys(): inputs.append(i) return inputs @property def lifecycle_operations(self): '''Return available life cycle operations if found.''' ops = None interfaces = self.interfaces if interfaces: i = InterfacesDef(self.type, ifaces.LIFECYCLE) ops = i.lifecycle_ops return ops def get_capability(self, name): caps = self.get_capabilities() if caps and name in caps.keys(): return caps[name].value def get_capability_type(self, name): captype = self.get_capability(name) if captype and name in captype.keys(): return captype[name].value def _validate_keys(self): if self.defs: for key in self.defs.keys(): if key not in self.SECTIONS: ExceptionCollector.appendException( UnknownFieldError(what='Nodetype"%s"' % self.ntype, field=key))