@@ -1,3 +1,4 @@ | |||
.ipynb_checkpoints | |||
.idea | |||
references | |||
@@ -20,10 +20,10 @@ | |||
"source": [ | |||
"The **print** statement can be used in the following different ways :\n", | |||
"\n", | |||
" - print \"Hello World\"\n", | |||
" - print \"Hello\", <Variable Containing the String>\n", | |||
" - print \"Hello\" + <Variable Containing the String>\n", | |||
" - print \"Hello %s\" % <variable containing the string>" | |||
" - print(\"Hello World\")\n", | |||
" - print(\"Hello\", <Variable Containing the String>)\n", | |||
" - print(\"Hello\" + <Variable Containing the String>)\n", | |||
" - print(\"Hello %s\" % <variable containing the string>)" | |||
] | |||
}, | |||
{ | |||
@@ -377,7 +377,7 @@ | |||
"cell_type": "markdown", | |||
"metadata": {}, | |||
"source": [ | |||
"#PrecisionWidth and FieldWidth" | |||
"# PrecisionWidth and FieldWidth" | |||
] | |||
}, | |||
{ | |||
@@ -0,0 +1,134 @@ | |||
# --- | |||
# jupyter: | |||
# jupytext_format_version: '1.2' | |||
# kernelspec: | |||
# display_name: Python 3 | |||
# language: python | |||
# name: python3 | |||
# language_info: | |||
# codemirror_mode: | |||
# name: ipython | |||
# version: 3 | |||
# file_extension: .py | |||
# mimetype: text/x-python | |||
# name: python | |||
# nbconvert_exporter: python | |||
# pygments_lexer: ipython3 | |||
# version: 3.5.2 | |||
# --- | |||
# All the IPython Notebooks in this lecture series are available at https://github.com/rajathkumarmp/Python-Lectures | |||
# # Print Statement | |||
# In Python 3, **print** is a function and can be used in the following different ways:
# | |||
# - print("Hello World") | |||
# - print("Hello", <Variable Containing the String>) | |||
# - print("Hello" + <Variable Containing the String>) | |||
# - print("Hello %s" % <variable containing the string>) | |||
print("Hello World") | |||
# In Python, single, double and triple quotes are used to denote a string. | |||
# Most programmers use single quotes when declaring a single character,
# double quotes when declaring a line, and triple quotes when declaring a paragraph/multiple lines.
print('Hey') | |||
print("""My name is Rajath Kumar M.P. | |||
I love Python.""") | |||
# Strings can be assigned to variables, say _string1_ and _string2_, which can then be used in the print call.
# + {"scrolled": true} | |||
string1 = 'World' | |||
print('Hello', string1) | |||
string2 = '!' | |||
print('Hello', string1, string2) | |||
# - | |||
# String concatenation is the "addition" of two strings. Observe that while concatenating there will be no space between the strings. | |||
print('Hello' + string1 + string2) | |||
# **%s** is used to refer to a variable which contains a string. | |||
print("Hello %s" % string1) | |||
# Similarly, when using other data types | |||
# | |||
# - %s -> string | |||
# - %d -> Integer | |||
# - %f -> Float | |||
# - %o -> Octal | |||
# - %x -> Hexadecimal | |||
# - %e -> exponential | |||
# | |||
# This can be used for conversions inside the print statement itself. | |||
print("Actual Number = %d" % 18) | |||
print("Float of the number = %f" % 18) | |||
print("Octal equivalent of the number = %o" % 18) | |||
print("Hexadecimal equivalent of the number = %x" % 18) | |||
print("Exponential equivalent of the number = %e" % 18) | |||
# When referring to multiple variables, a tuple of the variables in parentheses is used.
print("Hello %s %s" % (string1, string2))
# ## Other Examples | |||
# The following are other different ways the print statement can be put to use. | |||
print("I want %%d to be printed %s" %'here') | |||
print('_A'*10) | |||
print("Jan\nFeb\nMar\nApr\nMay\nJun\nJul\nAug") | |||
print("\n".join("Jan Feb Mar Apr May Jun Jul Aug".split(" "))) | |||
print("I want \\n to be printed.") | |||
print """ | |||
Routine: | |||
\t- Eat | |||
\t- Sleep\n\t- Repeat | |||
""" | |||
# # PrecisionWidth and FieldWidth | |||
# Fieldwidth is the width of the entire number, and precision is the number of digits to the right of the decimal point. One can alter these widths based on the requirements.
# | |||
# The default Precision Width is set to 6. | |||
"%f" % 3.121312312312 | |||
# Notice that up to 6 decimal places are returned. To specify the number of decimal places, '%(fieldwidth).(precisionwidth)f' is used.
"%.5f" % 3.121312312312 | |||
# If the field width is set wider than necessary, the data right-aligns itself to fit the specified width.
"%9.5f" % 3.121312312312 | |||
# Zero padding is done by adding a 0 at the start of fieldwidth. | |||
"%020.5f" % 3.121312312312 | |||
# For proper alignment, a space can be left blank in the field width so that when a negative number is used, proper alignment is maintained. | |||
print "% 9f" % 3.121312312312 | |||
print "% 9f" % -3.121312312312 | |||
# '+' sign can be returned at the beginning of a positive number by adding a + sign at the beginning of the field width. | |||
print "%+9f" % 3.121312312312 | |||
print "% 9f" % -3.121312312312 | |||
# As mentioned above, the data right-aligns itself when the field width mentioned is larger than the actual width of the data. Left alignment can be done by specifying a negative sign in the field width.
"%-9.3f" % 3.121312312312 |
@@ -0,0 +1,386 @@ | |||
# --- | |||
# jupyter: | |||
# jupytext_format_version: '1.2' | |||
# kernelspec: | |||
# display_name: Python 3 | |||
# language: python | |||
# name: python3 | |||
# language_info: | |||
# codemirror_mode: | |||
# name: ipython | |||
# version: 3 | |||
# file_extension: .py | |||
# mimetype: text/x-python | |||
# name: python | |||
# nbconvert_exporter: python | |||
# pygments_lexer: ipython3 | |||
# version: 3.5.2 | |||
# --- | |||
# All the IPython Notebooks in this lecture series are available at https://github.com/rajathkumarmp/Python-Lectures | |||
# # Data Structures | |||
# In simple terms, a data structure is a collection or group of data organized in a particular structure.
# ## Lists | |||
# Lists are the most commonly used data structure. Think of a list as a sequence of data enclosed in square brackets, with items separated by commas. Each item can be accessed through its index value.
# | |||
# Lists are declared by just equating a variable to '[ ]' or list. | |||
a = [] | |||
print(type(a)) | |||
# One can directly assign the sequence of data to a list x as shown. | |||
x = ['apple', 'orange', 'peach'] | |||
# ### Indexing | |||
# In Python, indexing starts from 0. Thus the list x, which has three elements, will have 'apple' at index 0, 'orange' at index 1 and 'peach' at index 2.
x[0] | |||
# Indexing can also be done in reverse order, i.e. the last element can be accessed first. Reverse indexing starts from -1. Thus index -1 will be 'peach', index -2 'orange' and index -3 'apple'.
x[-1] | |||
# As you might have already guessed, x[0] = x[-3], x[1] = x[-2] and x[2] = x[-1]. This concept can be extended to lists with many more elements.
y = ['carrot','potato'] | |||
# Here we have declared two lists, x and y, each containing its own data. These two lists can be put into another list, say z, whose elements are the two lists. A list inside a list is called a nested list, and this is how an array would be declared, which we will see later.
z = [x,y] | |||
print(z) | |||
# Indexing in nested lists can be quite confusing if you do not understand how indexing works in python. So let us break it down and then arrive at a conclusion. | |||
# | |||
# Let us access the data 'apple' in the above nested list. | |||
# First, at index 0 there is the list ['apple','orange','peach'] and at index 1 there is the list ['carrot','potato']. Hence z[0] should give us the first list, which contains 'apple'.
z1 = z[0] | |||
print(z1) | |||
# Now observe that z1 is not at all a nested list thus to access 'apple', z1 should be indexed at 0. | |||
z1[0] | |||
# Instead of doing the above, in Python you can access 'apple' by simply writing the index values side by side.
z[0][0] | |||
# If there was a list inside a list inside a list then you can access the innermost value by executing z[ ][ ][ ]. | |||
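# For instance (a small assumed illustration, continuing with z from above), one more level of nesting:
z2 = [z, ['grape']]
print(z2[0][0][0])   # 'apple' - a list inside a list inside a list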
# ### Slicing | |||
# Indexing was limited to accessing a single element; slicing, on the other hand, accesses a sequence of data inside the list. In other words, it "slices" the list.
#
# Slicing is done by specifying the index of the first element and the index just past the last element of the parent list that should appear in the sliced list. It is written as parentlist[ a : b ], where a and b are index values of the parent list. If a is not given, the slice starts at the beginning of the list; if b is not given, the slice runs to the end of the list.
num = [0,1,2,3,4,5,6,7,8,9] | |||
print(num[0:4]) | |||
print(num[4:]) | |||
# You can also slice a parent list with a fixed length or step length. | |||
num[:9:3] | |||
# ### Built in List Functions | |||
# To find the length of the list or the number of elements in a list, **len( )** is used. | |||
len(num) | |||
# If the list consists of integer elements, then **min( )** and **max( )** give the minimum and maximum values in the list.
min(num) | |||
max(num) | |||
# Lists can be concatenated by adding them with '+'. The resultant list contains all the elements of the lists that were added; it is not a nested list.
[1,2,3] + [5,4,7] | |||
# There might arise a requirement where you might need to check if a particular element is there in a predefined list. Consider the below list. | |||
names = ['Earth','Air','Fire','Water'] | |||
# To check if 'Fire' and 'Rajath' are present in the list names, a conventional approach would be to use a for loop, iterate over the list and use an if condition. But in Python you can use the 'a in b' construct, which returns True if a is present in b and False if not.
'Fire' in names | |||
'Rajath' in names | |||
# For a list whose elements are strings, **max( )** and **min( )** are also applicable: **max( )** returns the element with the highest ASCII value and **min( )** the lowest. Note that only the first character of each element is compared first; if those are equal, the second characters are compared, and so on.
mlist = ['bzaa','ds','nc','az','z','klm'] | |||
print(max(mlist)) | |||
print(min(mlist)) | |||
# Here the first character of each element is compared, so 'z' has the highest ASCII value and is returned, and the minimum starts with 'a'. But what if numbers are declared as strings?
nlist = ['1','94','93','1000'] | |||
print(max(nlist)) | |||
print(min(nlist)) | |||
# Even if the numbers are declared as strings, the first character of each element is compared, and the maximum and minimum values are returned accordingly.
# But if you want to find the **max( )** string element based on the length of the string then another parameter 'key=len' is declared inside the **max( )** and **min( )** function. | |||
print(max(names, key=len)) | |||
print(min(names, key=len)) | |||
# But even 'Water' has length 5. **max()** or **min()** function returns the first element when there are two or more elements with the same length. | |||
# | |||
# Any other built in function can be used or lambda function (will be discussed later) in place of len. | |||
# | |||
# A string can be converted into a list by using the **list()** function. | |||
list('hello') | |||
# **append( )** is used to add an element at the end of the list.
lst = [1,1,4,8,7] | |||
lst.append(1) | |||
print(lst) | |||
# **count( )** is used to count the number of a particular element that is present in the list. | |||
lst.count(1) | |||
# The **append( )** function can also be used to add an entire list at the end. Observe that the resultant list becomes a nested list.
lst1 = [5,4,2,8] | |||
lst.append(lst1) | |||
print(lst) | |||
# But if nested list is not what is desired then **extend( )** function can be used. | |||
lst.extend(lst1) | |||
print(lst) | |||
# **index( )** is used to find the index value of a particular element. Note that if there are multiple elements of the same value then the first index value of that element is returned. | |||
lst.index(1) | |||
# **insert(x,y)** is used to insert an element y at a specified index value x. The **append( )** function only allows adding at the end.
lst.insert(5, 'name') | |||
print(lst) | |||
# **insert(x,y)** inserts the element but does not replace an existing one. If you want to replace an element with another, simply assign the new value to that particular index.
lst[5] = 'Python' | |||
print(lst) | |||
# The **pop( )** function removes and returns the last element of the list. This is similar to the operation of a stack; hence it wouldn't be wrong to say that lists can be used as a stack.
lst.pop() | |||
# An index value can be specified to pop a certain element corresponding to that index value.
lst.pop(0) | |||
# **pop( )** removes an element based on its index value, and the removed element can be assigned to a variable. One can also remove an element by specifying the element itself using the **remove( )** function.
lst.remove('Python') | |||
print(lst) | |||
# An alternative to the **remove( )** function that uses the index value is **del**.
del lst[1] | |||
print(lst) | |||
# The entire elements present in the list can be reversed by using the **reverse()** function. | |||
lst.reverse() | |||
print(lst) | |||
# Note that the nested list [5,4,2,8] is treated as a single element of the parent list lst. Thus the elements inside the nested list are not reversed.
#
# Python offers the built-in method **sort( )** to arrange the elements in ascending order.
lst.sort() | |||
print(lst) | |||
# For descending order: by default the reverse parameter is False, and setting it to True arranges the elements in descending order.
lst.sort(reverse=True) | |||
print(lst) | |||
# Similarly, for lists containing string elements, **sort( )** sorts the elements based on their ASCII values, in ascending order by default and in descending order when reverse=True is specified.
names.sort() | |||
print(names) | |||
names.sort(reverse=True) | |||
print(names) | |||
# To sort based on length key=len should be specified as shown. | |||
names.sort(key=len) | |||
print(names) | |||
names.sort(key=len,reverse=True) | |||
print(names) | |||
# ### Copying a list | |||
# Many new Python programmers make this mistake. Consider the following:
lista= [2,1,4,3] | |||
listb = lista | |||
print(listb) | |||
# Here, we have declared a list, lista = [2,1,4,3]. This list is copied to listb by assigning its value, and it gets copied as seen. Now we perform some operations on lista:
lista.pop() | |||
print(lista) | |||
lista.append(9) | |||
print(lista) | |||
print(listb)
# listb has also changed, though no operation was performed on it. This is because you have assigned listb the same memory space as lista. So how do we fix this?
# | |||
# If you recall, in slicing we saw that parentlist[a:b] returns a new list from the parent list with start index a and end index b, and that if a and b are not mentioned the first and last elements are used by default. We use the same concept here: by doing so, we copy the data of lista into a new list and assign that copy to listb.
lista = [2,1,4,3] | |||
listb = lista[:] | |||
print(listb) | |||
lista.pop() | |||
print(lista) | |||
lista.append(9) | |||
print(lista) | |||
print(listb) | |||
# ## Tuples | |||
# Tuples are similar to lists, but with one big difference: the elements inside a list can be changed, while the elements of a tuple cannot. Think of a tuple as holding values that are fixed for a particular result and cannot be anything else. For better understanding, recall the **divmod()** function.
xyz = divmod(10,3) | |||
print(xyz) | |||
print(type(xyz)) | |||
# Here the quotient has to be 3 and the remainder has to be 1. These values cannot be changed whatsoever when 10 is divided by 3. Hence divmod returns these values in a tuple. | |||
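# To see this immutability in action (a small assumed illustration), trying to change an element of the tuple raises a TypeError:
try:
    xyz[0] = 5
except TypeError as err:
    print(err)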
# To define a tuple, a variable is assigned to parentheses ( ) or tuple( ).
tup = () | |||
tup2 = tuple() | |||
# If you want to directly declare a tuple it can be done by using a comma at the end of the data. | |||
27, | |||
# 27 multiplied by 2 yields 54, but when a tuple is multiplied by 2 its data is repeated twice.
2*(27,) | |||
# Values can be assigned while declaring a tuple: **tuple( )** takes a list as input and converts it into a tuple, or takes a string and converts it into a tuple.
# + {"scrolled": true} | |||
tup3 = tuple([1,2,3]) | |||
print(tup3) | |||
tup4 = tuple('Hello') | |||
print(tup4) | |||
# - | |||
# It follows the same indexing and slicing as Lists. | |||
print(tup3[1]) | |||
tup5 = tup4[:3] | |||
print(tup5) | |||
# ### Mapping one tuple to another | |||
(a,b,c)= ('alpha','beta','gamma') | |||
print(a,b,c) | |||
d = tuple('RajathKumarMP') | |||
print(d) | |||
# ### Built In Tuple functions | |||
# **count()** function counts the number of specified element that is present in the tuple. | |||
d.count('a') | |||
# The **index()** function returns the index of the specified element. If the element appears more than once, the index of its first occurrence is returned.
d.index('a') | |||
# ## Sets | |||
# Sets are mainly used to eliminate repeated entries in a sequence/list. They are also used to perform some standard set operations.
# | |||
# Sets are declared as set(), which initializes an empty set. set([sequence]) can also be executed to declare a set with elements.
set1 = set() | |||
print(type(set1)) | |||
set0 = set([1,2,2,3,3,4]) | |||
print(set0) | |||
# The elements 2 and 3, which were repeated, appear only once. Thus in a set each element is distinct.
# ### Built-in Functions | |||
set1 = set([1,2,3]) | |||
set2 = set([2,3,4,5]) | |||
# The **union( )** function returns a set which contains all the elements of both sets, without repetition.
set1.union(set2) | |||
# **add( )** will add a particular element to the set. Note that sets are unordered, so the newly added element may appear anywhere in the set, not necessarily at the end.
set1.add(0) | |||
set1 | |||
# **intersection( )** function outputs a set which contains all the elements that are in both sets. | |||
set1.intersection(set2) | |||
# The **difference( )** function outputs a set which contains the elements that are in set1 but not in set2.
set1.difference(set2) | |||
# The **symmetric_difference( )** function outputs a set which contains the elements that are in exactly one of the two sets.
set2.symmetric_difference(set1) | |||
# **issubset( ), isdisjoint( ), issuperset( )** are used to check whether set1/set2 is a subset, disjoint set, or superset of set2/set1 respectively.
set1.issubset(set2) | |||
set2.isdisjoint(set1) | |||
set2.issuperset(set1) | |||
# **pop( )** is used to remove an arbitrary element from the set.
set1.pop() | |||
print(set1) | |||
# **remove( )** function deletes the specified element from the set. | |||
set1.remove(2) | |||
set1 | |||
# **clear( )** is used to clear all the elements and make that set an empty set. | |||
set1.clear() | |||
set1 |
@@ -318,7 +318,14 @@ | |||
"while i < 3:\n", | |||
" print(i ** 2)\n", | |||
" i = i+1\n", | |||
"print('Bye')" | |||
"print('Bye')\n", | |||
"\n", | |||
"# do-untile\n", | |||
"while True:\n", | |||
" #do something\n", | |||
" \n", | |||
" # check \n", | |||
" if xxxx: break" | |||
] | |||
}, | |||
{ | |||
@@ -0,0 +1,175 @@ | |||
# --- | |||
# jupyter: | |||
# jupytext_format_version: '1.2' | |||
# kernelspec: | |||
# display_name: Python 3 | |||
# language: python | |||
# name: python3 | |||
# language_info: | |||
# codemirror_mode: | |||
# name: ipython | |||
# version: 3 | |||
# file_extension: .py | |||
# mimetype: text/x-python | |||
# name: python | |||
# nbconvert_exporter: python | |||
# pygments_lexer: ipython3 | |||
# version: 3.5.2 | |||
# --- | |||
# All the IPython Notebooks in this lecture series are available at https://github.com/rajathkumarmp/Python-Lectures | |||
# # Control Flow Statements | |||
# ## If | |||
# if some_condition: | |||
# | |||
# algorithm | |||
x = 12 | |||
if x >10: | |||
print("Hello") | |||
# ## If-else | |||
# if some_condition: | |||
# | |||
# algorithm | |||
# | |||
# else: | |||
# | |||
# algorithm | |||
x = 12 | |||
if x > 10: | |||
print("hello") | |||
else: | |||
print("world") | |||
# ## if-elif | |||
# if some_condition: | |||
# | |||
# algorithm | |||
# | |||
# elif some_condition: | |||
# | |||
# algorithm | |||
# | |||
# else: | |||
# | |||
# algorithm | |||
x = 10 | |||
y = 12 | |||
if x > y: | |||
print("x>y") | |||
elif x < y: | |||
print("x<y") | |||
else: | |||
print("x=y") | |||
# An if statement inside another if statement (or inside an if-elif or if-else) is called a nested if statement.
x = 10 | |||
y = 12 | |||
if x > y: | |||
print("x>y") | |||
elif x < y: | |||
print("x<y") | |||
if x==10: | |||
print("x=10") | |||
else: | |||
print("invalid") | |||
else: | |||
print("x=y") | |||
# ## Loops | |||
# ### For | |||
# for variable in something: | |||
# | |||
# algorithm | |||
for i in range(5): | |||
print(i) | |||
# In the above example, i iterates over 0, 1, 2, 3, 4. Each time it takes one value and executes the algorithm inside the loop. It is also possible to iterate over a nested list, as illustrated below.
list_of_lists = [[1, 2, 3], [4, 5, 6], [7, 8, 9]] | |||
for list1 in list_of_lists: | |||
print(list1) | |||
# A use case of a nested for loop in this case would be, | |||
list_of_lists = [[1, 2, 3], [4, 5, 6], [7, 8, 9]] | |||
for list1 in list_of_lists: | |||
for x in list1: | |||
print(x) | |||
# ### While | |||
# while some_condition: | |||
# | |||
# algorithm | |||
# + | |||
i = 1 | |||
while i < 3: | |||
print(i ** 2) | |||
i = i+1 | |||
print('Bye') | |||
# do-until pattern: run the body, then check an exit condition
j = 0
while True:
    # do something
    j = j + 1
    # check the exit condition
    if j >= 3:
        break
# - | |||
# ## Break | |||
# As the name says, it is used to break out of a loop when a condition becomes true while executing the loop.
for i in range(100): | |||
    print(i)
if i>=7: | |||
break | |||
# ## Continue | |||
# The **continue** statement skips the rest of the statements in the current iteration and moves on to the next iteration, without terminating the loop.
for i in range(10): | |||
if i>4: | |||
print("The end.") | |||
continue | |||
elif i<7: | |||
print(i) | |||
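# As an extra (assumed) example, **continue** can be used to skip the even numbers and print only the odd ones:
for i in range(6):
    if i % 2 == 0:
        continue     # skip the rest of this iteration for even numbers
    print(i)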
# ## List Comprehensions | |||
# Python makes it simple to generate a required list with a single line of code using a list comprehension. For example, to generate the multiples of, say, 27 using a for loop we would write:
res = [] | |||
for i in range(1,11): | |||
x = 27*i | |||
res.append(x) | |||
print(res)
# Since we are generating another list anyway, and that list is all we need, a list comprehension is a more concise way to solve this problem.
[27*x for x in range(1,11)] | |||
# That's it! Just remember to enclose it in square brackets.
# Reading the code: the first part is always the expression (the algorithm), followed by a space and then the necessary loop. But you might be wondering whether nested loops can also be extended to list comprehensions. Yes, they can.
[27*x for x in range(1,20) if x<=10] | |||
# Let us add one more loop to make this clearer:
[27*z for i in range(50) if i==27 for z in range(1,11)] |
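# As one more assumed illustration, the nested for loop over list_of_lists shown earlier can be written as a single comprehension:
[x for list1 in list_of_lists for x in list1]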
@@ -1,243 +0,0 @@ | |||
[TOC] | |||
# **$ \color {gray}Unsupervised \quad Learning $** | |||
Unsupervised learning uses unlabeled data to learn *the distribution of the data* or *the relationships between data points*.
The biggest difference between supervised and unsupervised learning is whether the data is labeled.
The most common applications of unsupervised learning are **clustering** and **dimensionality reduction** (DimensionReduction).
## Clustering
Clustering is the process of grouping data into multiple classes according to the "similarity" between data points.

### Clustering algorithms
+ Partitioning clustering (k-means)
+ Hierarchical (structural) clustering
### Distance function
(Similarity or dissimilarity): measures how similar two data points are.
### Evaluating a clustering
+ Intra-cluster variation (distance within a cluster): minimize
+ Inter-cluster variation (distance between clusters): maximize
The quality of a clustering result depends heavily on the **algorithm**, the **distance function** and the **application domain**.
## $\color{red} k\text{-means clustering} $
+ k-means is a partitioning clustering algorithm
+ It repeatedly assigns the data to k clusters according to some distance function
+ **k-means partitions the given data into k clusters**
+ Each cluster has a cluster center that represents it; this center is the mean of all data points in the cluster
+ k is specified by the user
### Steps of the k-means algorithm
```
1. Randomly pick k data points as the initial cluster centers
2. Compute the distance between every data point and each center, and assign each point to its nearest cluster center
3. After the points have been assigned, recompute each cluster center from the points currently in that cluster
4. Repeat until a termination condition is met
```
### Termination conditions
+ No (or a minimal number of) data points are reassigned to a different cluster
+ No (or a minimal number of) cluster centers change any more
+ The `sum of squared error` (SSE) reaches a local minimum
$$
\sum_{j=1}^k \sum_{x\in C_j} dist(x,m_j)^2
$$
where $C_j$ is the $j$-th cluster, $m_j$ is the center of cluster $C_j$ (the mean vector of all data points in $C_j$), and $dist(x,m_j)$ is the distance between data point $x$ and the cluster center $m_j$.
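The steps above translate almost directly into code. The following is a minimal NumPy sketch (an added illustration; the function and variable names are assumptions, and it assumes no cluster ever becomes empty):

```python
import numpy as np

def kmeans(X, k, n_iter=100, seed=0):
    rng = np.random.default_rng(seed)
    # 1. randomly pick k data points as the initial cluster centers
    centers = X[rng.choice(len(X), size=k, replace=False)]
    for _ in range(n_iter):
        # 2. assign every data point to its nearest center
        dists = np.linalg.norm(X[:, None, :] - centers[None, :, :], axis=2)
        labels = dists.argmin(axis=1)
        # 3. recompute each center as the mean of the points assigned to it
        new_centers = np.array([X[labels == j].mean(axis=0) for j in range(k)])
        # 4. stop when the centers no longer move
        if np.allclose(new_centers, centers):
            break
        centers = new_centers
    return centers, labels

X = np.vstack([np.random.randn(50, 2), np.random.randn(50, 2) + 5])
centers, labels = kmeans(X, k=2)
print(centers)
```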
### Example


![](./pic/03.jpeg)
### Computing distances
+ k-means can be used on any dataset where a mean can be defined and computed
+ In `Euclidean space`, the cluster mean is computed as $m_j = {1\over \mid C_j \mid} \sum_{x_i \in C_j} x_i$
+ The distance between a data point and a cluster center is computed as $dist(x_i,m_j)=\mid\mid x_i - m_j \mid\mid = \sqrt{(x_{i1} - m_{j1}) ^2 + (x_{i2} - m_{j2}) ^2 +...+ (x_{ir} - m_{jr}) ^2} $
### Worked example



### Strengths and weaknesses
#### Strengths
+ Simplicity: easy to understand and easy to implement
+ Efficiency: the time complexity is $O(tkn)$ (where $k$ is the number of clusters and $t$ is the number of iterations)
+ Since $k$ and $t$ are usually much smaller than $n$, k-means is considered linear in the number of data points
+ k-means is the most popular of the clustering algorithms
#### Weaknesses
+ The algorithm can only be used on datasets where a mean can be defined.
  + For `categorical data` there is a variant of k-means, the `k-modes algorithm`.
+ The number of clusters k must be specified in advance
+ The algorithm is very sensitive to `outliers`
  + Outliers are data points that lie very far from all other data points
  + Outliers may be errors produced during data collection, or special points with genuinely different values
#### Dealing with outliers

+ One approach: during clustering, remove the data points that are much farther from the cluster centers than the other points (such potential outliers are usually monitored over several iterations before deciding whether to delete them)
+ Another approach: `random sampling`; since only a small fraction of the data points is sampled, the probability of picking an outlier is small (the sampled points can be pre-clustered first and the remaining points then assigned to those clusters)
#### The algorithm is very sensitive to the initial seeds


#### The algorithm is not suitable for finding clusters that are not `hyper-ellipsoids` (or hyper-spheres)

## Hierarchical clustering

### Agglomerative (bottom-up) clustering:
Builds the dendrogram from the bottom level up:
+ Merge the most similar (closest) clusters to form the clusters of the level above
+ The process stops when all data points have been merged into a single cluster
### Divisive (top-down) clustering:
Starts from a single cluster containing all data points:
+ The root cluster is split into several sub-clusters, and each sub-cluster is recursively split further
+ The process stops when only single-point clusters remain
#### Example




### Computing the distance between two clusters
+ Single-link method
+ Complete-link method
+ Average-link method
+ Centroid method
+ ...
#### Single-link method
+ The distance between the two closest data points, one from each cluster
+ Suitable for finding clusters with odd shapes
+ Very sensitive to noise in the data
#### Complete-link method
+ The maximum distance over all pairs of data points, one from each cluster
+ Very sensitive to outliers
#### Average-link method
+ A compromise between the complete-link method's sensitivity to outliers and the single-link method's tendency to form long chains (which do not match the intuition that clusters are compact ellipsoidal objects)
+ The distance between two clusters is the average of the distances over all pairs of data points drawn from the two clusters
#### Centroid method
The distance between two clusters is the distance between their centroids
#### Complexity

## $\color{red} PCA$
### Problem
Real training data always comes with various problems:
- For example, a car dataset may contain a maximum-speed feature measured in "km/h" as well as one measured in "miles/h"; clearly one of the two features is redundant.
- A final-exam score sheet for mathematics undergraduates may have three columns: interest in mathematics, revision time, and exam score. Doing well in mathematics requires strong interest, so the second column is strongly correlated with the first, and the third with the second. Could the first and second columns be merged?
- A dataset may have very many features but very few samples, which makes a direct regression fit difficult and prone to overfitting. Take Beijing house prices: suppose the features are (size, location, orientation, school district, year built, second-hand or not, number of floors, which floor it is on), yet there are fewer than ten example houses. Fitting features -> price with this many features will overfit.
Below we discuss a method called principal component analysis (PCA) that addresses some of these problems. The idea of PCA is to map the $n$-dimensional features to $k$ dimensions $(k<n)$, where the $k$ dimensions are brand-new orthogonal features. These $k$ features are called principal components; they are newly constructed $k$-dimensional features, not simply the original $n$-dimensional features with the other $n-k$ removed.
### Computation steps
1. Compute the mean of each feature.
2. Compute the feature covariance matrix.
3. Compute the eigenvalues and eigenvectors of the covariance matrix.
4. Sort the eigenvalues in descending order, select the largest $k$, and stack the corresponding $k$ eigenvectors as column vectors into an eigenvector matrix.
5. Project the samples onto the selected eigenvectors. Suppose there are $m$ samples with $n$ features, the mean-subtracted sample matrix is DataAdjust $(m \times n)$, the covariance matrix is $n \times n$, and the matrix formed by the selected $k$ eigenvectors is EigenVectors $(n \times k)$. The projected data $FinalData$ is then
$$
FinalData(m \times k)=DataAdjust(m \times n)\times EigenVectors(n \times k)
$$
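As a small added sketch (not part of the original notes; names are illustrative), the five steps above can be written in NumPy roughly as follows:

```python
import numpy as np

def pca(X, k):
    # steps 1-2: subtract the feature means and form the covariance matrix (n x n)
    data_adjust = X - X.mean(axis=0)
    cov = np.cov(data_adjust, rowvar=False)
    # step 3: eigenvalues and eigenvectors (eigh: the covariance matrix is symmetric)
    eigvals, eigvecs = np.linalg.eigh(cov)
    # step 4: keep the k eigenvectors with the largest eigenvalues, as columns
    order = np.argsort(eigvals)[::-1][:k]
    eigen_vectors = eigvecs[:, order]            # n x k
    # step 5: FinalData(m x k) = DataAdjust(m x n) @ EigenVectors(n x k)
    return data_adjust @ eigen_vectors

X = np.random.rand(10, 2)
print(pca(X, k=1).shape)   # (10, 1)
```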
### Example
+ First, suppose we have the following 2-D data:

The rows are samples and the columns are features; there are 10 samples, each with two features. You can think of them as 10 documents, with $x$ the TF-IDF of "learn" and $y$ the TF-IDF of "study" in each document; or as 10 cars, with $x$ the speed in km/h and $y$ the speed in miles/h, and so on.
1. Step 1: compute the means of $x$ and $y$ separately, and subtract the corresponding mean from every sample. Here the mean of $x$ is 1.81 and the mean of $y$ is 1.91, so the first sample after mean subtraction becomes (0.69, 0.49), giving

2. Step 2: compute the feature covariance matrix. The data is 2-dimensional, so the covariance matrix is


The diagonal entries are the variances of $x$ and $y$, and the off-diagonal entries are the covariances. A covariance greater than 0 means that when one of $x$ and $y$ increases, the other tends to increase as well; less than 0 means one increases while the other decreases; a covariance of 0 means the two are independent. The larger the absolute value of the covariance, the stronger the influence of the two variables on each other.
3. Compute the eigenvalues and eigenvectors of the covariance matrix:

The upper row gives the two eigenvalues, and below are the corresponding eigenvectors. The eigenvalue 0.0490833989 corresponds to the eigenvector $(-0.735178656, 0.677873399)^T$. The eigenvectors are normalized to unit length.
4. Sort the eigenvalues in descending order and select the largest $k$, then stack the corresponding $k$ eigenvectors as column vectors into an eigenvector matrix. Here there are only two eigenvalues, so we choose the larger one, 1.28402771, whose eigenvector is $(0.677873399, -0.735178656)^T$.
5. Project the samples onto the selected eigenvectors. With $m$ samples and $n$ features, the mean-subtracted sample matrix is $DataAdjust(m \times n)$, the covariance matrix is $n \times n$, and the matrix of the selected $k$ eigenvectors is $EigenVectors(n \times k)$. The projected data is $FinalData(m \times k) = DataAdjust(m \times n) \times EigenVectors(n \times k)$.
The result is:

In this way the original $n$-dimensional features of the samples become $k$-dimensional: the projections of the original features onto the $k$ selected directions.
The data above can be viewed as merging the learn and study features into a new feature, call it LS, which essentially represents both.
The process is illustrated by the following figure:

The plus signs are the preprocessed samples, and the two slanted lines are the orthogonal eigenvectors (the covariance matrix is symmetric, so its eigenvectors are orthogonal). The matrix multiplication in the last step projects the original samples onto the axes defined by the eigenvectors.
If we take k=2, the result is

This is the data after PCA: the horizontal axis (the LS feature in the example above) essentially represents all the samples. The whole process looks like a rotation of the coordinate system; this can be visualized in two dimensions, of course, but not in higher dimensions. If k=1, only the horizontal axis remains, with every point projected onto it.
That is essentially the whole PCA procedure. After the mean subtraction in step 1 there should actually be one more step: normalizing the variance of each feature. For example, one feature might be the speed of a car (0 to 100) and another the number of seats (2 to 6); the second clearly has a much smaller variance. If the sample features show this kind of disparity, then after step 1 compute the standard deviation $\sigma_j$ of each feature and divide each sample's value of that feature by $\sigma_j$.
To summarize, using our familiar notation, the steps before computing the covariance are:

where $x^{(i)}$ is a sample (there are m samples, each with n features, so $x^{(i)}$ is an n-dimensional vector), $x_j^{(i)}$ is the $j$-th feature of the $i$-th sample, $\mu$ is the sample mean, and $\sigma_j$ is the standard deviation of the $j$-th feature.
The whole PCA procedure looks extremely simple: compute the eigenvalues and eigenvectors of the covariance matrix, then transform the data. But isn't it remarkable? Why are the eigenvectors of the covariance matrix the ideal $k$ directions? What is the hidden meaning behind this, and what does PCA really mean?
### Theoretical basis
##### Maximum-variance theory
In signal processing it is assumed that a signal has large variance while noise has small variance; the signal-to-noise ratio is the ratio of the two variances, and larger is better. We therefore regard the best k-dimensional features to be those for which, after the n-dimensional samples are mapped to k dimensions, the sample variance along each dimension is as large as possible.
For example, the figure below shows 5 samples (already preprocessed: zero mean, unit feature variance):

We now project the samples onto one dimension, represented here by a line through the origin (the preprocessing effectively moves the origin to the center of the samples).

Suppose we choose two different lines for the projection; which of the left and right figures below is better? By the maximum-variance argument above, the left one is better, because the variance of the projected samples is larger.


The red points are the samples $x^{(i)}$ and the blue points are their projections onto $u$, where $u$ is the direction vector of the line (a unit vector). The distance of a projected point from the origin is $x^{(i)^T}u$ (equivalently $u^Tx^{(i)}$). Since every feature of the samples has mean 0, the projections onto $u$ (each just a signed distance from the origin) also have mean 0.
Returning to the left of the two figures, we want the best $u$ that maximizes the variance of the projected samples. Since the projections have mean 0, the variance is:
$$
\begin{align}
\frac {1}{m}\sum_{i=1}^{m}(x^{(i)^T}u)^2 &= \frac{1}{m}\sum_{i=1}^{m}u^Tx^{(i)}x^{(i)^T}u\\
&= u^T\left(\frac{1}{m}\sum_{i=1}^{m}x^{(i)}x^{(i)^T}\right)u.
\end{align}
$$
The middle part should look familiar: it is exactly the covariance matrix of the sample features (the mean of $x^{(i)}$ is 0; a covariance matrix is usually divided by $m-1$, here we divide by $m$).
Write $\lambda$ for $\frac {1}{m}\sum_{i=1}^{m}(x^{(i)^T}u)^2$ and $\Sigma$ for $\frac{1}{m}\sum_{i=1}^{m}x^{(i)}x^{(i)^T}$; then the expression above becomes
$$
\lambda=u^T\Sigma u
$$
Since $u$ is a unit vector, $u^Tu=1$. Left-multiplying both sides by $u$ gives $u\lambda=\lambda u=uu^T\Sigma u=\Sigma u$ (more rigorously, maximizing $u^T\Sigma u$ subject to $u^Tu=1$ with a Lagrange multiplier leads to the same condition),
i.e. $\Sigma u = \lambda u$.
We got it! $\lambda$ is an eigenvalue of $\Sigma$ and $u$ is an eigenvector. The best projection direction is the eigenvector corresponding to the largest eigenvalue $\lambda$, the next best is the eigenvector of the second-largest $\lambda$, and so on.
So we only need to perform an eigendecomposition of the covariance matrix; the eigenvectors corresponding to the top $k$ eigenvalues are the best $k$ new features, and these $k$ new features are orthogonal. Having obtained the top $k$ vectors $u$, a sample $x^{(i)}$ is transformed into the new sample
$$
y^{(i)}=\begin{bmatrix}
u_1^Tx^{(i)} \\
u_2^Tx^{(i)} \\
... \\
u_k^Tx^{(i)}
\end{bmatrix}
$$
where the $j$-th dimension is the projection of $x^{(i)}$ onto $u_j$. By keeping only the $k$ directions $u$ with the largest eigenvalues, the low-variance features (such as noise) are discarded.
### Summary and discussion
A major benefit of PCA is dimensionality reduction. We can rank the newly obtained "principal component" directions by importance, keep only the most important leading ones as needed, and drop the remaining dimensions; this reduces the dimensionality, which simplifies the model or compresses the data while preserving as much of the information in the original data as possible.
Another big advantage of PCA is that it is completely parameter-free: the computation requires no manually set parameters and no intervention based on any empirical model, and the result depends only on the data, independently of the user. However, this can also be seen as a weakness: if the user has prior knowledge about the observed objects and knows some characteristics of the data, but has no way to influence the processing through parameters or other means, the result may fall short of expectations and the method may be inefficient.
Sometimes the data does not follow a Gaussian distribution. As shown in the figure below, for non-Gaussian data the principal components found by PCA may not be optimal. When looking for principal components, variance can then no longer serve as the measure of importance; instead one should choose, according to how the data is actually distributed, variables that describe the full distribution, and then use the probability factorization
$$
P(y_1,y_2)=P(y_1)P(y_2)
$$
to measure the correlation of the data along the two directions. Equivalently, keeping the assumption that the components are orthogonal, the components we look for must satisfy $P(y_1,y_2)=0$. This family of methods is called independent component analysis (ICA). In the figure below the data is clearly not Gaussian and forms an obvious cross shape; in this case the direction of maximum variance is not the best principal direction.

## Notes
1. PCA can reduce the dimensionality and can also increase it
2. The matrix computation in PCA follows the same principle as the `SVD decomposition` used in linear optimization for SLAM
3. On solving the matrix problem: computing eigenvalues and eigenvectors directly is computationally expensive, so in practice SVD is usually used instead
4. The essence of PCA is the diagonalization of a real symmetric matrix to remove feature redundancy
5. Advantages and applications of PCA: denoising, easier classification, providing parameters for pre-trained deep networks, and facilitating subsequent operations such as classification
6. Practice using `sklearn` and the `jupyter notebook`
@@ -19,7 +19,113 @@ | |||
# version: 3.5.2 | |||
# --- | |||
# # k-means demo | |||
# # k-means | |||
# ## Theory | |||
# | |||
# Thanks to its excellent speed and good scalability, K-Means is arguably the best-known clustering algorithm. K-Means is a process of repeatedly moving the cluster centers, also called centroids, to the mean position of the members they contain and then re-assigning the members.
#
# K is a hyperparameter of the algorithm that specifies the number of clusters; K-Means assigns samples to clusters automatically, but it cannot decide how many clusters there should be.
#
# K must be a positive integer smaller than the number of training samples. Sometimes the number of clusters is dictated by the problem itself. For example, a shoe factory with three new styles wants to know the potential customers of each style, so it surveys its customers and then looks for three groups in the data. Other problems do not specify the number of clusters, and the optimal number is uncertain.
#
# The parameters of K-Means are the positions of the cluster centroids and the assignments of the observations they contain. As with generalized linear models and decision trees, the optimal parameters of K-Means are those that minimize a cost function. The K-Means cost function is:
# $$
# J = \sum_{k=1}^{K} \sum_{i \in C_k} | x_i - u_k|^2
# $$
#
# $u_k$ is the centroid of the $k$-th cluster, defined as:
# $$
# u_k = \frac{1}{|C_k|} \sum_{x \in C_k} x
# $$
#
#
# The cost function is the sum of the distortions of the clusters. The distortion of a cluster is the sum of squared distances between its centroid and the positions of its members. The more compact the members of a cluster are, the smaller its distortion; the more spread out they are, the larger its distortion.
#
# Finding the parameters that minimize the cost function is a process of repeatedly re-assigning the observations to clusters and moving the cluster centroids:
# 1. First, the centroids are set to random positions; in practice they are set to the positions of randomly chosen observations.
# 2. In each iteration, K-Means assigns the observations to their nearest centroid and then moves each centroid to the mean position of all its members.
# 3. The algorithm stops when the maximum number of iterations is reached or when the change between two consecutive iterations is below a set threshold; otherwise step 2 is repeated.
# | |||
# | |||
# + | |||
% matplotlib inline | |||
import matplotlib.pyplot as plt | |||
import numpy as np | |||
X0 = np.array([7, 5, 7, 3, 4, 1, 0, 2, 8, 6, 5, 3]) | |||
X1 = np.array([5, 7, 7, 3, 6, 4, 0, 2, 7, 8, 5, 7]) | |||
plt.figure() | |||
plt.axis([-1, 9, -1, 9]) | |||
plt.grid(True) | |||
plt.plot(X0, X1, 'k.'); | |||
# - | |||
# Suppose K-Means is initialized with the centroid of the first cluster at the 5th sample and the centroid of the second cluster at the 11th sample. We can then compute the distance of every instance to both centroids and assign it to the nearest cluster. The results are shown in the table below:
#  | |||
# | |||
# The new centroid positions and the initial clustering result are shown in the figure below. The first cluster is drawn with X markers and the second with dots; the centroids are highlighted with slightly larger markers.
# | |||
# | |||
# | |||
C1 = [1, 4, 5, 9, 11] | |||
C2 = list(set(range(12)) - set(C1)) | |||
X0C1, X1C1 = X0[C1], X1[C1] | |||
X0C2, X1C2 = X0[C2], X1[C2] | |||
plt.figure() | |||
plt.title('1st iteration results') | |||
plt.axis([-1, 9, -1, 9]) | |||
plt.grid(True) | |||
plt.plot(X0C1, X1C1, 'rx') | |||
plt.plot(X0C2, X1C2, 'g.') | |||
plt.plot(4,6,'rx',ms=12.0) | |||
plt.plot(5,5,'g.',ms=12.0); | |||
# Now we recompute the two centroids, move them to their new positions, recompute the distance of every sample to the new centroids, and re-assign the samples according to these distances. The results are shown in the table below:
#
# 
#
# The plot looks like this:
C1 = [1, 2, 4, 8, 9, 11] | |||
C2 = list(set(range(12)) - set(C1)) | |||
X0C1, X1C1 = X0[C1], X1[C1] | |||
X0C2, X1C2 = X0[C2], X1[C2] | |||
plt.figure() | |||
plt.title('2nd iteration results') | |||
plt.axis([-1, 9, -1, 9]) | |||
plt.grid(True) | |||
plt.plot(X0C1, X1C1, 'rx') | |||
plt.plot(X0C2, X1C2, 'g.') | |||
plt.plot(3.8,6.4,'rx',ms=12.0) | |||
plt.plot(4.57,4.14,'g.',ms=12.0); | |||
# We repeat the procedure once more: move the centroids to their new positions, recompute the distances of the samples to the new centroids, and re-assign the samples accordingly. The results are shown in the table below:
# 
#
# The plot looks like this:
# | |||
C1 = [0, 1, 2, 4, 8, 9, 10, 11] | |||
C2 = list(set(range(12)) - set(C1)) | |||
X0C1, X1C1 = X0[C1], X1[C1] | |||
X0C2, X1C2 = X0[C2], X1[C2] | |||
plt.figure() | |||
plt.title('3rd iteration results') | |||
plt.axis([-1, 9, -1, 9]) | |||
plt.grid(True) | |||
plt.plot(X0C1, X1C1, 'rx') | |||
plt.plot(X0C2, X1C2, 'g.') | |||
plt.plot(5.5,7.0,'rx',ms=12.0) | |||
plt.plot(2.2,2.8,'g.',ms=12.0); | |||
# Repeating the procedure once more shows that the centroids no longer move; K-Means stops when its stopping condition is met. Usually the condition is that the change in the cost function between two consecutive iterations, or the change in the centroid positions, falls below a limit. If these stopping thresholds are small enough, K-Means finds an optimal solution, though not necessarily the global optimum.
# | |||
# | |||
# ## Program | |||
# + | |||
# This line configures matplotlib to show figures embedded in the notebook, | |||
@@ -55,7 +161,7 @@ def originalDatashow(dataSet): | |||
plt.xlabel('sepal length') | |||
plt.ylabel('sepal width') | |||
plt.show() | |||
# + {"scrolled": true} | |||
# load the sample data
@@ -213,3 +319,170 @@ def trgartshow(dataSet, k, labels): | |||
# 绘图显示 | |||
datashow(datamat, k, mycentroids, clusterAssment) | |||
trgartshow(datamat, 3, labels) | |||
# ## How to use sklearn to do the clustering
# | |||
# + | |||
from sklearn.datasets import load_digits | |||
import matplotlib.pyplot as plt | |||
from sklearn.cluster import KMeans | |||
# load digital data | |||
digits, dig_label = load_digits(return_X_y=True) | |||
# draw one digital | |||
plt.gray() | |||
plt.matshow(digits[0].reshape([8, 8])) | |||
plt.show() | |||
# calculate train/test data number | |||
N = len(digits) | |||
N_train = int(N*0.8) | |||
N_test = N - N_train | |||
# split train/test data | |||
x_train = digits[:N_train, :] | |||
y_train = dig_label[:N_train] | |||
x_test = digits[N_train:, :] | |||
y_test = dig_label[N_train:] | |||
# + | |||
# do kmeans | |||
kmeans = KMeans(n_clusters=10, random_state=0).fit(x_train) | |||
# kmeans.labels_ - output label | |||
# kmeans.cluster_centers_ - cluster centers | |||
# draw cluster centers | |||
fig, axes = plt.subplots(nrows=1, ncols=10) | |||
for i in range(10): | |||
img = kmeans.cluster_centers_[i].reshape(8, 8) | |||
axes[i].imshow(img) | |||
# - | |||
# ## Exercise - How to calculate the accuracy?
#
# 1. How to match cluster labels to the ground-truth labels? (One possible sketch is given below.)
# 2. How to handle digits whose cluster assignment is ambiguous?
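# One possible sketch for exercise 1 (an assumed approach, not the only answer): map each cluster to the most frequent ground-truth digit among its members, then measure accuracy.
# +
import numpy as np

cluster_to_digit = {}
for c in range(10):
    members = y_train[kmeans.labels_ == c]
    if len(members) > 0:
        # most frequent ground-truth label inside this cluster
        cluster_to_digit[c] = np.bincount(members).argmax()

mapped_train = np.array([cluster_to_digit[c] for c in kmeans.labels_])
print("majority-vote train accuracy = %f" % (mapped_train == y_train).mean())
# -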
# ## Evaluating clustering performance
#
# Method 1: if the data used for evaluation comes with the correct class information, use the Adjusted Rand Index (ARI). ARI is similar to computing accuracy in classification, but it also accounts for the fact that clusters cannot be matched one-to-one with class labels.
# | |||
# | |||
# + | |||
from sklearn.metrics import adjusted_rand_score | |||
ari_train = adjusted_rand_score(y_train, kmeans.labels_) | |||
print("ari_train = %f" % ari_train) | |||
# - | |||
# Given the contingency table: | |||
#  | |||
# | |||
# the adjusted index is: | |||
#  | |||
# | |||
# * [ARI reference](https://davetang.org/muse/2017/09/21/adjusted-rand-index/) | |||
# | |||
# | |||
# Method 2: if the evaluation data has no class labels, the Silhouette Coefficient is used to measure the quality of the clustering result. It takes both the cohesion and the separation of the clusters into account, ranges over [-1, 1], and larger values indicate a better clustering.
#
# The silhouette coefficient is computed as follows:
# 1. For the $i$-th sample $x_i$ of the clustered data, compute the average distance between $x_i$ and all other samples in its own cluster, denoted $a_i$; this quantifies the cohesion within the cluster
# 2. Pick a cluster $b$ not containing $x_i$ and compute the average distance between $x_i$ and all samples in $b$; go over all other clusters and take the smallest such average distance, denoted $b_i$; this quantifies the separation between clusters
# 3. For sample $x_i$, the silhouette coefficient is $sc_i = \frac{b_i - a_i}{max(b_i,a_i)}$
# 4. Finally, average over the whole sample set $\mathbf{X}$ to obtain the overall silhouette coefficient of the current clustering result.
# + | |||
import numpy as np | |||
from sklearn.cluster import KMeans | |||
from sklearn.metrics import silhouette_score | |||
import matplotlib.pyplot as plt | |||
plt.rcParams['figure.figsize']=(10,10) | |||
plt.subplot(3,2,1) | |||
x1=np.array([1,2,3,1,5,6,5,5,6,7,8,9,7,9])   # initialize the raw data
x2=np.array([1,3,2,2,8,6,7,6,7,1,2,1,1,3]) | |||
X=np.array(list(zip(x1,x2))).reshape(len(x1),2) | |||
plt.xlim([0,10]) | |||
plt.ylim([0,10]) | |||
plt.title('Instances') | |||
plt.scatter(x1,x2) | |||
colors=['b','g','r','c','m','y','k','b'] | |||
markers=['o','s','D','v','^','p','*','+'] | |||
clusters=[2,3,4,5,8] | |||
subplot_counter=1 | |||
sc_scores=[] | |||
for t in clusters: | |||
subplot_counter +=1 | |||
plt.subplot(3,2,subplot_counter) | |||
    kmeans_model=KMeans(n_clusters=t).fit(X)   # fit a KMeans model
for i,l in enumerate(kmeans_model.labels_): | |||
plt.plot(x1[i],x2[i],color=colors[l],marker=markers[l],ls='None') | |||
plt.xlim([0,10]) | |||
plt.ylim([0,10]) | |||
    sc_score=silhouette_score(X,kmeans_model.labels_,metric='euclidean')   # compute the silhouette coefficient
sc_scores.append(sc_score) | |||
plt.title('k=%s,silhouette coefficient=%0.03f'%(t,sc_score)) | |||
plt.figure() | |||
plt.plot(clusters,sc_scores,'*-')   # plot the number of clusters against the silhouette coefficient
plt.xlabel('Number of Clusters') | |||
plt.ylabel('Silhouette Coefficient Score') | |||
plt.show() | |||
# - | |||
# ## How to determine the 'k'?
#
# The 'elbow method' gives a rough estimate of a reasonable number of clusters. K-means ultimately wants *the sum of squared distances of all data points to their assigned cluster centers* to stabilize, so we can watch how this value changes with K to find the best number of clusters. Ideally, as the curve keeps decreasing and flattens out, there is a kink in its slope: from the K at that kink onward, adding more cluster centers no longer substantially changes the clustering structure of the data.
# | |||
# | |||
# + | |||
import numpy as np | |||
from sklearn.cluster import KMeans | |||
from scipy.spatial.distance import cdist | |||
import matplotlib.pyplot as plt | |||
cluster1=np.random.uniform(0.5,1.5,(2,10)) | |||
cluster2=np.random.uniform(5.5,6.5,(2,10)) | |||
cluster3=np.random.uniform(3,4,(2,10)) | |||
X=np.hstack((cluster1,cluster2,cluster3)).T | |||
plt.scatter(X[:,0],X[:,1]) | |||
plt.xlabel('x1') | |||
plt.ylabel('x2') | |||
plt.show() | |||
# + | |||
K=range(1,10) | |||
meandistortions=[] | |||
for k in K: | |||
kmeans=KMeans(n_clusters=k) | |||
kmeans.fit(X) | |||
meandistortions.append(sum(np.min(cdist(X,kmeans.cluster_centers_,'euclidean'),axis=1))/X.shape[0]) | |||
plt.plot(K,meandistortions,'bx-') | |||
plt.xlabel('k') | |||
plt.ylabel('Average Dispersion') | |||
plt.title('Selecting k with the Elbow Method') | |||
plt.show() | |||
# - | |||
# The figure above shows that as the number of clusters goes from 1 to 2 and then to 3, changing K alters the overall clustering structure substantially, which means these K values give the algorithm much more room to converge and therefore do not reflect the true number of clusters. Once K exceeds 3, the average dispersion decreases much more slowly, which means further increases of K no longer help convergence and suggests that K=3 is the relatively best number of clusters.
@@ -6,7 +6,37 @@ | |||
"source": [ | |||
"# KNN Classification\n", | |||
"\n", | |||
"\n" | |||
"\n", | |||
"KNN最邻近规则,主要应用领域是对未知事物的识别,即判断未知事物属于哪一类,判断思想是,基于欧几里得定理,判断未知事物的特征和哪一类已知事物的的特征最接近;\n", | |||
"\n", | |||
"K最近邻(k-Nearest Neighbor,KNN)分类算法,是一个理论上比较成熟的方法,也是最简单的机器学习算法之一。该方法的思路是:如果一个样本在特征空间中的k个最相似(即特征空间中最邻近)的样本中的大多数属于某一个类别,则该样本也属于这个类别。KNN算法中,所选择的邻居都是已经正确分类的对象。该方法在定类决策上只依据最邻近的一个或者几个样本的类别来决定待分样本所属的类别。 KNN方法虽然从原理上也依赖于极限定理,但在类别决策时,只与极少量的相邻样本有关。由于KNN方法主要靠周围有限的邻近的样本,而不是靠判别类域的方法来确定所属类别的,因此对于类域的交叉或重叠较多的待分样本集来说,KNN方法较其他方法更为适合。\n", | |||
"\n", | |||
"KNN算法不仅可以用于分类,还可以用于回归。通过找出一个样本的k个最近邻居,将这些邻居的属性的平均值赋给该样本,就可以得到该样本的属性。更有用的方法是将不同距离的邻居对该样本产生的影响给予不同的权值(weight),如权值与距离成正比(组合函数)。\n", | |||
"\n", | |||
"该算法在分类时有个主要的不足是,当样本不平衡时,如一个类的样本容量很大,而其他类样本容量很小时,有可能导致当输入一个新样本时,该样本的K个邻居中大容量类的样本占多数。 该算法只计算“最近的”邻居样本,某一类的样本数量很大,那么或者这类样本并不接近目标样本,或者这类样本很靠近目标样本。无论怎样,数量并不能影响运行结果。可以采用权值的方法(和该样本距离小的邻居权值大)来改进。该方法的另一个不足之处是计算量较大,因为对每一个待分类的文本都要计算它到全体已知样本的距离,才能求得它的K个最近邻点。目前常用的解决方法是事先对已知样本点进行剪辑,事先去除对分类作用不大的样本。该算法比较适用于样本容量比较大的类域的自动分类,而那些样本容量较小的类域采用这种算法比较容易产生误分。\n", | |||
"\n", | |||
"K-NN可以说是一种最直接的用来分类未知数据的方法。基本通过下面这张图跟文字说明就可以明白K-NN是干什么的\n", | |||
"\n", | |||
"\n", | |||
"简单来说,K-NN可以看成:有那么一堆你已经知道分类的数据,然后当一个新数据进入的时候,就开始跟训练数据里的每个点求距离,然后挑离这个训练数据最近的K个点看看这几个点属于什么类型,然后用少数服从多数的原则,给新数据归类。\n", | |||
"\n", | |||
"\n", | |||
"算法步骤:\n", | |||
"\n", | |||
"* step.1---初始化距离为最大值\n", | |||
"* step.2---计算未知样本和每个训练样本的距离dist\n", | |||
"* step.3---得到目前K个最临近样本中的最大距离maxdist\n", | |||
"* step.4---如果dist小于maxdist,则将该训练样本作为K-最近邻样本\n", | |||
"* step.5---重复步骤2、3、4,直到未知样本和所有训练样本的距离都算完\n", | |||
"* step.6---统计K-最近邻样本中每个类标号出现的次数\n", | |||
"* step.7---选择出现频率最大的类标号作为未知样本的类标号" | |||
] | |||
}, | |||
{ | |||
"cell_type": "markdown", | |||
"metadata": {}, | |||
"source": [ | |||
"## Program" | |||
] | |||
}, | |||
{ | |||
@@ -1,3 +1,4 @@ | |||
# -*- coding: utf-8 -*- | |||
# --- | |||
# jupyter: | |||
# jupytext_format_version: '1.2' | |||
@@ -20,7 +21,31 @@ | |||
# # KNN Classification | |||
# | |||
# | |||
# KNN, the nearest-neighbor rule, is mainly used to recognize unknown objects, i.e. to decide which class an unknown object belongs to. The idea is to judge, based on Euclidean distance, which class of known objects has features closest to those of the unknown object.
#
# The k-Nearest Neighbor (KNN) classification algorithm is a theoretically mature method and one of the simplest machine learning algorithms. The idea is: if the majority of the k most similar samples of a sample (i.e. its nearest neighbors in feature space) belong to a certain class, then the sample belongs to that class as well. In KNN, the selected neighbors are all objects that have already been classified correctly. The method decides the class of the sample to be classified based only on the class of its nearest one or few samples. Although KNN relies on a limit theorem in principle, the class decision involves only a very small number of neighboring samples. Because KNN relies mainly on a limited number of nearby samples rather than on discriminating class regions, it is better suited than other methods to sample sets whose class regions intersect or overlap heavily.
#
# KNN can be used not only for classification but also for regression. By finding the k nearest neighbors of a sample and assigning the average of their attribute values to that sample, we obtain its attribute. A more useful variant gives neighbors at different distances different weights, e.g. weights inversely proportional to the distance (a combination function).
#
# A main shortcoming of the algorithm in classification is class imbalance: when one class has many samples and the others have few, the K neighbors of a new sample may be dominated by the large class. The algorithm only counts the 'nearest' neighbors: if one class is very large, its samples are either not actually close to the target sample or genuinely close to it; either way, the raw count alone should not decide the result. This can be improved by weighting (neighbors closer to the sample get larger weights). Another shortcoming is the computational cost: for every sample to be classified, the distance to all known samples must be computed in order to find its K nearest neighbors. A common remedy is to edit the known samples beforehand and remove those that contribute little to classification. The algorithm is better suited to automatically classifying classes with large sample sizes; classes with few samples are more easily misclassified.
#
# K-NN is arguably the most direct way to classify unknown data. The figure and description below are basically enough to understand what K-NN does.
# 
#
# Simply put, K-NN works like this: you have a pile of data whose classes you already know; when a new data point arrives, you compute its distance to every point in the training data, pick the K points closest to it, look at which classes those K points belong to, and assign the new point to the majority class.
#
#
# Algorithm steps (a short sketch follows the list):
#
# * step.1 --- initialize the distance to the maximum value
# * step.2 --- compute the distance dist between the unknown sample and each training sample
# * step.3 --- find maxdist, the largest distance among the current K nearest samples
# * step.4 --- if dist is smaller than maxdist, take this training sample as a K-nearest neighbor
# * step.5 --- repeat steps 2, 3 and 4 until the distances between the unknown sample and all training samples have been computed
# * step.6 --- count how often each class label appears among the K nearest neighbors
# * step.7 --- choose the most frequent class label as the label of the unknown sample
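# A minimal brute-force sketch of the steps above (an assumed illustration; the notebook's own implementation follows under "Program"):
# +
import numpy as np
from collections import Counter

def knn_predict(train_x, train_y, x, k=3):
    # step 2: distance from the unknown sample x to every training sample
    dists = np.linalg.norm(train_x - x, axis=1)
    # steps 3-5: keep the k nearest training samples
    nearest = np.argsort(dists)[:k]
    # steps 6-7: majority vote over their class labels
    return Counter(train_y[nearest]).most_common(1)[0][0]

toy_x = np.array([[0, 0], [0, 1], [1, 0], [5, 5], [5, 6], [6, 5]])
toy_y = np.array([0, 0, 0, 1, 1, 1])
print(knn_predict(toy_x, toy_y, np.array([4.5, 5.0])))   # expected class: 1
# -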
# ## Program | |||
# + | |||
% matplotlib inline | |||
@@ -56,6 +56,20 @@ | |||
# | |||
# | |||
# + | |||
# %matplotlib inline | |||
import matplotlib.pyplot as plt | |||
import numpy as np | |||
plt.figure() | |||
plt.axis([-10,10,0,1]) | |||
plt.grid(True) | |||
X=np.arange(-10,10,0.1) | |||
y=1/(1+np.e**(-X)) | |||
plt.plot(X,y,'b-') | |||
plt.title("Logistic function") | |||
# - | |||
# Logistic regression is essentially linear regression with an extra function applied in the mapping from features to result: the features are first combined linearly, and then the function $g(z)$ is used as the hypothesis for prediction. $g(z)$ can map continuous values into the range 0 to 1. Substituting the linear regression model into $g(z)$ gives the expression for logistic regression:
# | |||
# $$ | |||
@@ -104,6 +118,8 @@ | |||
# | |||
# | |||
# ## Program | |||
# + | |||
# %matplotlib inline | |||
@@ -192,6 +208,108 @@ logistic = Logistic(data, label) | |||
logistic.train(200) | |||
plot_decision_boundary(lambda x: logistic.predict(x), data, label) | |||
# ## How to use sklearn to solve the problem
# | |||
# + | |||
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix | |||
from sklearn.metrics import accuracy_score | |||
import matplotlib.pyplot as plt | |||
# calculate train/test data number | |||
N = len(data) | |||
N_train = int(N*0.8) | |||
N_test = N - N_train | |||
# split train/test data | |||
x_train = data[:N_train, :] | |||
y_train = label[:N_train] | |||
x_test = data[N_train:, :] | |||
y_test = label[N_train:] | |||
# do logistic regression | |||
lr=LogisticRegression() | |||
lr.fit(x_train,y_train) | |||
pred_train = lr.predict(x_train) | |||
pred_test = lr.predict(x_test) | |||
# calculate train/test accuracy | |||
acc_train = accuracy_score(y_train, pred_train) | |||
acc_test = accuracy_score(y_test, pred_test) | |||
print("accuracy train = %f" % acc_train) | |||
print("accuracy test = %f" % acc_test) | |||
# plot confusion matrix | |||
cm = confusion_matrix(y_test,pred_test) | |||
plt.matshow(cm) | |||
plt.title(u'Confusion Matrix') | |||
plt.colorbar() | |||
plt.ylabel(u'Groundtruth') | |||
plt.xlabel(u'Predict') | |||
plt.show() | |||
# - | |||
# ## Multi-class recognition | |||
# + | |||
from sklearn.datasets import load_digits | |||
import matplotlib.pyplot as plt | |||
# load digital data | |||
digits, dig_label = load_digits(return_X_y=True) | |||
print(digits.shape) | |||
# draw one digital | |||
plt.gray() | |||
plt.matshow(digits[0].reshape([8, 8])) | |||
plt.show() | |||
# calculate train/test data number | |||
N = len(digits) | |||
N_train = int(N*0.8) | |||
N_test = N - N_train | |||
# split train/test data | |||
x_train = digits[:N_train, :] | |||
y_train = dig_label[:N_train] | |||
x_test = digits[N_train:, :] | |||
y_test = dig_label[N_train:] | |||
# do logistic regression | |||
lr=LogisticRegression() | |||
lr.fit(x_train,y_train) | |||
pred_train = lr.predict(x_train) | |||
pred_test = lr.predict(x_test) | |||
# calculate train/test accuracy | |||
acc_train = accuracy_score(y_train, pred_train) | |||
acc_test = accuracy_score(y_test, pred_test) | |||
print("accuracy train = %f, accuracy_test = %f" % (acc_train, acc_test) | |||
score_train = lr.score(x_train, y_train) | |||
score_test = lr.score(x_test, y_test) | |||
print("score_train = %f, score_test = %f" % (score_train, score_test)) | |||
# plot confusion matrix | |||
cm = confusion_matrix(y_test,pred_test) | |||
plt.matshow(cm) | |||
plt.title(u'Confusion Matrix') | |||
plt.colorbar() | |||
plt.ylabel(u'Groundtruth') | |||
plt.xlabel(u'Predict') | |||
plt.show() | |||
# - | |||
# ## Exercise - How to draw mis-classified data?
#
# 1. How to obtain the indices of the mis-classified samples?
# 2. How to draw them? (One possible sketch is given below.)
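# One possible sketch for this exercise (assumes `digits`, `y_test`, `pred_test` and `N_train` from the cells above):
# +
import numpy as np
import matplotlib.pyplot as plt

# 1. indices (within the test set) where the prediction differs from the ground truth
mis_idx = np.where(pred_test != y_test)[0]
print("number of mis-classified samples: %d" % len(mis_idx))

# 2. draw the first few of them with predicted (p) and true (t) labels
n_show = min(8, len(mis_idx))
fig, axes = plt.subplots(1, n_show, figsize=(2 * n_show, 2))
for ax, i in zip(np.atleast_1d(axes), mis_idx[:n_show]):
    ax.imshow(digits[N_train + i].reshape(8, 8), cmap='gray')
    ax.set_title("p=%d, t=%d" % (pred_test[i], y_test[i]))
    ax.axis('off')
plt.show()
# -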
# ## References | |||
# | |||
# * [逻辑回归模型(Logistic Regression, LR)基础](https://www.cnblogs.com/sparkwen/p/3441197.html) | |||
@@ -0,0 +1,141 @@ | |||
# -*- coding: utf-8 -*- | |||
# --- | |||
# jupyter: | |||
# jupytext_format_version: '1.2' | |||
# kernelspec: | |||
# display_name: Python 3 | |||
# language: python | |||
# name: python3 | |||
# language_info: | |||
# codemirror_mode: | |||
# name: ipython | |||
# version: 3 | |||
# file_extension: .py | |||
# mimetype: text/x-python | |||
# name: python | |||
# nbconvert_exporter: python | |||
# pygments_lexer: ipython3 | |||
# version: 3.5.2 | |||
# --- | |||
# # Chaining a PCA and a logistic regression | |||
# The PCA does an unsupervised dimensionality reduction, while the logistic regression does the prediction. | |||
# | |||
# We use a GridSearchCV to set the dimensionality of the PCA | |||
# + | |||
% matplotlib inline | |||
import numpy as np | |||
import matplotlib.pyplot as plt | |||
from sklearn import linear_model, decomposition, datasets | |||
from sklearn.pipeline import Pipeline | |||
from sklearn.model_selection import GridSearchCV | |||
logistic = linear_model.LogisticRegression() | |||
pca = decomposition.PCA() | |||
pipe = Pipeline(steps=[('pca', pca), ('logistic', logistic)]) | |||
digits = datasets.load_digits() | |||
X_digits = digits.data | |||
y_digits = digits.target | |||
# Plot the PCA spectrum | |||
pca.fit(X_digits) | |||
plt.figure(1, figsize=(4, 3)) | |||
plt.clf() | |||
plt.axes([.2, .2, .7, .7]) | |||
plt.plot(pca.explained_variance_, linewidth=2) | |||
plt.axis('tight') | |||
plt.xlabel('n_components') | |||
plt.ylabel('explained_variance_') | |||
# Prediction | |||
n_components = [20, 40, 64] | |||
Cs = np.logspace(-4, 4, 3) | |||
# Parameters of pipelines can be set using ‘__’ separated parameter names: | |||
estimator = GridSearchCV(pipe, | |||
dict(pca__n_components=n_components, | |||
logistic__C=Cs)) | |||
estimator.fit(X_digits, y_digits) | |||
plt.axvline(estimator.best_estimator_.named_steps['pca'].n_components, | |||
linestyle=':', label='n_components chosen') | |||
plt.legend(prop=dict(size=12)) | |||
plt.show() | |||
# + | |||
# Compare the performance | |||
from sklearn.datasets import load_digits | |||
from sklearn.linear_model import LogisticRegression
from sklearn import decomposition | |||
from sklearn.metrics import confusion_matrix | |||
from sklearn.metrics import accuracy_score | |||
import matplotlib.pyplot as plt | |||
# load digital data | |||
digits, dig_label = load_digits(return_X_y=True) | |||
print(digits.shape) | |||
# draw one digital | |||
plt.gray() | |||
plt.matshow(digits[0].reshape([8, 8])) | |||
plt.show() | |||
# + | |||
# calculate train/test data number | |||
N = len(digits) | |||
N_train = int(N*0.8) | |||
N_test = N - N_train | |||
# split train/test data | |||
x_train = digits[:N_train, :] | |||
y_train = dig_label[:N_train] | |||
x_test = digits[N_train:, :] | |||
y_test = dig_label[N_train:] | |||
# do logistic regression | |||
lr=LogisticRegression() | |||
lr.fit(x_train,y_train) | |||
pred_train = lr.predict(x_train) | |||
pred_test = lr.predict(x_test) | |||
# calculate train/test accuracy | |||
acc_train = accuracy_score(y_train, pred_train) | |||
acc_test = accuracy_score(y_test, pred_test) | |||
print("accuracy train = %f, accuracy_test = %f" % (acc_train, acc_test)) | |||
# + | |||
# do PCA with 'n_components=40' | |||
pca = decomposition.PCA(n_components=40) | |||
pca.fit(x_train) | |||
x_train_pca = pca.transform(x_train) | |||
x_test_pca = pca.transform(x_test) | |||
# do logistic regression | |||
lr=LogisticRegression() | |||
lr.fit(x_train_pca,y_train) | |||
pred_train = lr.predict(x_train_pca) | |||
pred_test = lr.predict(x_test_pca) | |||
# calculate train/test accuracy | |||
acc_train = accuracy_score(y_train, pred_train) | |||
acc_test = accuracy_score(y_test, pred_test) | |||
print("accuracy train = %f, accuracy_test = %f" % (acc_train, acc_test)) | |||
# - | |||
# ## References | |||
# * [Pipelining: chaining a PCA and a logistic regression](http://scikit-learn.org/stable/auto_examples/plot_digits_pipe.html) |
@@ -1,500 +0,0 @@ | |||
7.475811838082768723e-03 4.320836189610323119e-01 1.450549175723391038e+00 | |||
4.991058415580543750e-01 2.094374801667562291e-01 1.102532698521218268e+00 | |||
1.193536163361264002e-01 5.963489824485116442e-01 1.338273355186238245e+00 | |||
4.769187785997547335e-01 9.109195645869490043e-01 2.602219201834365947e+00 | |||
7.303936733915606938e-01 8.857684867949857654e-01 2.210152595170989276e+00 | |||
9.664601349489546633e-01 7.502994138032407223e-01 2.611077802324385910e+00 | |||
2.542019778882254055e-02 7.428502628957164289e-01 1.710698945274088834e+00 | |||
1.778136599348375535e-01 5.930384510525207320e-01 2.233352927908840435e+00 | |||
4.492592286670140656e-01 8.931411427088303823e-01 3.102819283880683177e+00 | |||
8.370430987882071516e-02 2.614373501818388901e-01 8.640697795350422705e-01 | |||
8.341080791107999826e-01 6.784596613777602592e-01 3.226838828319181562e+00 | |||
7.167018545861755241e-01 1.103531872063134855e-02 3.812269666339226926e-02 | |||
8.183226795351219440e-01 6.657461817854845032e-01 1.547628048238904874e+00 | |||
2.834730567607464113e-01 3.260372156848528880e-01 1.066274317849608133e+00 | |||
4.816520737750709102e-01 9.933843474355296133e-01 3.471689866478016473e+00 | |||
5.507062166025468164e-01 1.219201174732618131e-01 -3.873755702169334736e-02 | |||
9.275675675329209113e-01 7.595330167450311531e-01 2.289908502396237111e+00 | |||
7.561238363026515641e-01 6.840828577303232905e-01 2.519846561855788813e+00 | |||
4.957723968953803295e-01 8.670850445766850045e-01 2.935924016824970284e+00 | |||
5.408335800327461751e-01 3.247707156381132165e-01 8.585149457083230962e-01 | |||
6.892698013969100757e-02 9.085711937091790480e-01 2.274493253938949078e+00 | |||
6.741864184517211145e-01 7.817210669276070112e-01 2.513319828565115710e+00 | |||
8.243668219151468746e-01 2.917544002350455079e-01 1.599121705167765972e+00 | |||
8.776077707996776667e-01 1.677106045886949115e-01 7.843847468722240368e-01 | |||
2.500776223292151546e-01 3.588873034434612030e-01 1.653274592743033899e+00 | |||
2.021786578617587038e-01 1.418393464135232485e-01 3.933672100694594609e-01 | |||
2.685507556945776475e-02 7.321598812138923051e-01 2.127765105548871993e+00 | |||
8.195523222775495942e-01 3.451370805481794868e-01 1.377961502837144137e+00 | |||
1.971981386783976387e-01 4.517957732604490628e-01 1.883090356939532306e+00 | |||
9.324125360887314251e-02 8.518183180329133020e-01 2.720069834246886309e+00 | |||
2.682640801887333781e-01 7.666508565073675285e-01 2.133635338893320910e+00 | |||
5.988554394607304987e-01 5.078756300002995605e-02 -1.323176605521795279e-01 | |||
4.132057609719871349e-01 7.733878480448631576e-01 2.669836471857727211e+00 | |||
8.968299298666455588e-01 1.759543852410582199e-01 9.980225650229066492e-01 | |||
6.604143061271943171e-01 6.340983996248182875e-01 2.105840665588138982e+00 | |||
5.266614520604195882e-02 1.525488634313088010e-02 -2.153966553289855534e-01 | |||
6.974062198613504293e-01 5.944606189059847834e-01 1.763717859467236915e+00 | |||
5.387863758358174548e-01 1.019159109038116906e-01 9.558460757183967482e-02 | |||
6.249043870655015542e-01 4.299862339088595942e-01 1.522006406940794809e+00 | |||
6.661354880603580497e-01 6.036168002547487310e-01 1.930785326585231942e+00 | |||
4.085927067704629989e-01 3.085466034371613375e-01 8.242125987920496666e-01 | |||
2.645712089691105717e-01 7.794962481435983026e-01 2.138245618867576248e+00 | |||
5.732978944125887155e-01 4.848706031355866086e-01 1.080208500702684260e+00 | |||
2.713139146988112760e-01 1.379451495861444554e-01 4.228339638555064228e-01 | |||
2.755820924890763912e-01 8.530702479483407430e-01 3.322053433962536939e+00 | |||
2.091501940600326570e-02 2.048108692344442483e-01 4.191702012093132534e-01 | |||
1.534394912084446894e-02 9.148363421420675490e-01 1.800710510442548484e+00 | |||
1.278958637503535556e-01 3.910178296001215470e-01 1.326305294981870109e+00 | |||
7.343913194845445025e-01 9.578241564768665839e-01 3.666444442007755988e+00 | |||
8.728738265684310438e-01 8.137136652443932583e-02 -6.680219410291982074e-02 | |||
3.434930909495775841e-01 3.668093961439626849e-01 1.298070495530361690e+00 | |||
6.340750930182676992e-01 4.293034895647808158e-01 1.753627303728895637e+00 | |||
7.916753701139102040e-01 8.792271853310851260e-01 2.593999286745963584e+00 | |||
1.783997341756226040e-01 3.861425389666840458e-01 9.128976540430178144e-01 | |||
5.928812756610245538e-01 4.438943836328812509e-02 1.000094214885776989e+00 | |||
9.664522600713033595e-01 9.466939185617491148e-01 2.906269893635449630e+00 | |||
4.066561253239548979e-01 9.171665625609342065e-01 2.742986919040616645e+00 | |||
8.236115710884057695e-01 2.647959880978716374e-01 9.459738056948978624e-01 | |||
6.320423316726820895e-01 8.360438827586792465e-01 3.216488737449289648e+00 | |||
5.138224534917832376e-01 5.374361346225066116e-01 1.527247360071752880e+00 | |||
4.579382292678627620e-01 3.949198869420947888e-01 1.501425648147641345e+00 | |||
9.576670559259582438e-01 3.983246523147065954e-01 1.237575342612352136e+00 | |||
1.138067686089871966e-01 1.567309453549632359e-01 7.005322455414590976e-01 | |||
6.404494439786647675e-01 1.780641159603901791e-01 7.938637139365654072e-01 | |||
1.387840471133794384e-02 6.237823673113149781e-01 1.476386204302320593e+00 | |||
3.030445989624356296e-01 6.198141088519637520e-02 4.567005603096885302e-01 | |||
3.802531980402333867e-01 5.409927006646785275e-01 1.883581189342869333e+00 | |||
6.705087997031178304e-02 1.741428118633180366e-01 9.121942481269487146e-01 | |||
8.211668779283721742e-01 5.249226808311039383e-01 2.170534841817413074e+00 | |||
2.069294306883516787e-01 2.366319826124895220e-01 1.542613224726550936e+00 | |||
9.532381178881612627e-01 8.535486150346145440e-01 2.356388047056950708e+00 | |||
7.960224813956610079e-01 3.619061945798818236e-02 5.172188609486845223e-01 | |||
8.877672702671365323e-01 9.713145572816609397e-01 3.306905373104077839e+00 | |||
7.489306869397681643e-01 8.473656034978621632e-01 2.535918702634719413e+00 | |||
7.177108320559019150e-01 1.087337598575145448e-01 1.586935432727936135e-01 | |||
2.447720414767670105e-01 5.109625703599354196e-01 1.407260059648873574e+00 | |||
9.890927331418312152e-01 2.953923437136736219e-01 1.072070459129647713e+00 | |||
1.086920779608940846e-01 6.318071227439751025e-01 1.557679918613608994e+00 | |||
9.644991742126940437e-01 4.703489200292593209e-01 1.636317030674938611e+00 | |||
6.213140595058208593e-02 8.477486902449127282e-02 3.813767326246751410e-01 | |||
4.952571105530045870e-01 5.700052493447709256e-01 1.934233726936544162e+00 | |||
2.726756709341970897e-01 2.146651782929371866e-01 7.986346066861963466e-01 | |||
8.917441922488205108e-01 9.560923675042851677e-01 3.119016030037050413e+00 | |||
7.707565352454615049e-02 1.161474483544463476e-01 -1.644060962672083126e-01 | |||
8.619330010808275988e-01 5.483846347652111541e-01 1.887271595787430201e+00 | |||
6.756497088473898405e-01 1.596640416697148179e-02 3.250751138478518709e-01 | |||
6.078153373128571957e-01 3.720033630724720597e-01 1.411933889913144835e+00 | |||
3.174037326900748246e-01 1.549614002822162773e-01 8.161302398949438208e-01 | |||
4.476293363565838623e-01 8.210638934292713298e-01 2.480054998404300992e+00 | |||
7.084428331937857592e-01 6.952726744740225229e-01 1.912595645951341350e+00 | |||
7.313728224808281331e-01 4.590841357253414579e-01 1.092024568392572004e+00 | |||
8.346529609846853326e-01 6.280661067885178195e-01 1.703742672110366119e+00 | |||
1.949849239944413393e-01 7.815804264527685952e-01 2.740131052532522649e+00 | |||
3.618961197943663111e-01 9.493746530596582911e-01 3.483716212818180669e+00 | |||
9.426901777014041750e-01 5.218380228405639087e-02 4.376997690535605612e-01 | |||
8.226362345946165444e-01 4.809419675081207934e-01 1.458586094754612761e+00 | |||
7.067354980129005870e-01 7.228649825823781905e-01 2.419791589190408487e+00 | |||
1.023755386264367795e-01 7.922838157573184326e-01 2.441085427209842340e+00 | |||
2.289851593560128240e-01 5.546786377010594071e-01 1.647696527055220850e+00 | |||
4.460650478440573163e-01 2.191506522298012349e-01 1.024946687942562829e+00 | |||
4.483396050145066480e-01 2.429242359343196656e-01 1.309989269584039384e+00 | |||
9.430377189635058555e-01 8.411778387111719102e-01 3.432728797502154983e+00 | |||
2.864482468911577762e-02 8.546327218757242550e-01 2.722370794510544201e+00 | |||
2.241511135464431259e-01 7.859659876748782414e-01 2.491116071935152387e+00 | |||
9.966196752234992573e-01 6.955150962623877220e-01 3.093102615008462841e+00 | |||
2.477410776072497267e-01 8.178117265377349288e-02 7.342611816274411396e-01 | |||
3.790620764631877915e-01 9.593005540814388832e-01 3.171558355615661906e+00 | |||
7.558769731762405852e-01 2.294088007751564628e-01 5.538922276913116338e-01 | |||
7.192011532951225750e-02 6.754074740436515345e-01 2.527960192024258479e+00 | |||
3.234731742291891976e-01 8.990872474399824732e-02 2.308891315011747103e-01 | |||
1.105033974749317638e-01 3.560288831966261780e-01 1.355952289465385707e+00 | |||
4.316861899693822391e-01 6.140824910286386551e-01 2.175048267024927817e+00 | |||
4.173677249147860202e-01 8.307176675711246405e-01 2.891136308161926127e+00 | |||
4.878284313449211540e-01 1.217117118902810713e-02 3.765562264346458687e-02 | |||
5.698100028891839308e-02 9.369647431950471894e-01 2.828524603619942734e+00 | |||
7.937063049510053592e-01 9.965934172054009554e-01 3.621790555298875613e+00 | |||
9.185931909842128817e-01 6.488091171997073525e-01 2.143860442680804557e+00 | |||
9.007204053721260539e-01 8.537714025365400472e-01 2.646754629169127071e+00 | |||
8.278514553747188254e-01 2.005889941342431371e-01 7.271439836489447250e-01 | |||
2.373489829046158128e-01 1.418258400242289330e-02 -6.020465063396163163e-01 | |||
6.117272773480567638e-01 6.369715377051763383e-01 1.454500098779854422e+00 | |||
9.961403950188231216e-01 3.421270980816193408e-01 1.695221418633425792e+00 | |||
3.060008110713063889e-02 6.001080183202910368e-01 2.002526417406803372e+00 | |||
2.164319231564044710e-01 8.136362177731774059e-01 1.757163156232044887e+00 | |||
4.320315166604467016e-01 4.720548127901512681e-01 1.795390386829843532e+00 | |||
9.817822535962430486e-01 9.617769716915969269e-01 2.665581707035302728e+00 | |||
7.824931791481782861e-01 8.442127024468111252e-01 3.127126340389913661e+00 | |||
9.034259616014704841e-01 3.510687988820446748e-01 1.524380717380441341e+00 | |||
9.340361687788649725e-01 6.443396379643503424e-01 1.577033167963769245e+00 | |||
9.349627019103919912e-01 9.740965725817858356e-01 2.777737402561453184e+00 | |||
5.903568814739056370e-01 3.993580841930692849e-01 9.865016362870495659e-01 | |||
4.310475296439005843e-01 4.231135093604750930e-01 6.160817574532500007e-01 | |||
4.066501997846739824e-01 7.450933101505008427e-01 2.074051841595779599e+00 | |||
1.029377684482553068e-01 3.799493301021364955e-01 1.256140270860516495e+00 | |||
2.166875813791702132e-01 5.680548857183227440e-01 1.860536057054025694e+00 | |||
7.361620363451337745e-03 6.692094353896700376e-01 2.297233859209406592e+00 | |||
2.251381419088346325e-01 8.652108236891861148e-01 2.666916298098993998e+00 | |||
1.504251225087782640e-01 1.130070792465698304e-02 -5.110469555600358760e-01 | |||
5.937063067825341101e-01 2.716007117804829507e-01 1.267438724867238964e+00 | |||
4.634541081507811411e-01 6.681602022681033537e-01 2.564355938834183224e+00 | |||
2.250187085316336377e-01 9.707240268187979915e-01 2.676052205504153569e+00 | |||
9.815037489648346103e-01 2.641637699209509194e-02 1.554105300329002293e-01 | |||
5.798419531491214585e-01 7.299465348305756995e-01 2.111624689082501227e+00 | |||
8.972502355513533034e-01 9.156452851300300733e-01 3.126488976232349337e+00 | |||
4.125381992536247822e-01 3.249668969776204897e-01 1.830878545388462175e+00 | |||
8.269411939892717944e-01 7.979113620685827080e-01 2.707397108997154689e+00 | |||
9.993682944332172857e-01 6.221934355638074621e-01 2.549876142740037821e+00 | |||
1.309511636557203484e-02 3.505925898386655915e-01 1.305297897892122805e+00 | |||
4.102824415325945218e-02 7.457568198685821104e-01 1.543540270208316301e+00 | |||
8.117891142542627003e-01 1.272650640958630230e-01 1.082974948650097691e+00 | |||
7.481657398409454762e-01 9.308262831414119143e-01 2.578937787382042934e+00 | |||
5.088769917590663105e-03 5.245899619502442102e-01 1.727984379791936531e+00 | |||
6.400399192179795360e-01 9.629290544827761034e-01 2.938595199549385306e+00 | |||
2.773619565845855117e-01 4.542961730983430257e-01 1.168737911397091711e+00 | |||
6.686043177934657411e-01 7.553176981648601629e-01 1.644825403924084384e+00 | |||
2.637964834961262772e-01 9.844424430040311647e-01 3.632246264813325798e+00 | |||
4.529904429357893836e-01 5.591014088518869718e-01 1.421411174425226243e+00 | |||
5.914692044780885949e-01 5.224614059870554827e-01 1.501115101695089438e+00 | |||
5.278611861799314964e-01 8.706323340559083412e-01 2.599792319687709963e+00 | |||
7.265719805715095125e-01 4.647055521928602495e-01 1.623872125626333318e+00 | |||
2.374412593234882518e-01 6.781222697832595125e-01 2.978750721035211768e+00 | |||
1.096602828167132504e-01 6.233577231346018044e-01 1.686962232177067644e+00 | |||
8.285502135116481481e-01 3.054139242298121726e-01 1.144537009942264483e+00 | |||
7.915711302950751982e-01 7.848184053509537073e-01 2.892002414681882794e+00 | |||
1.762498459189343336e-02 4.855504085991589891e-01 1.619558293913433200e+00 | |||
6.262818009408183118e-01 1.603201032517899138e-01 6.252513064490023975e-01 | |||
2.830688286014748645e-02 5.242293138669631425e-01 1.202461608418680550e+00 | |||
3.427022322407036192e-01 7.081650882875548669e-01 2.150677972793977322e+00 | |||
5.135236067462749521e-01 2.438597121143804003e-01 6.384817981414274701e-01 | |||
2.868808958191264580e-01 7.384571396801111742e-01 3.105295444220282697e+00 | |||
9.736860152443834737e-01 2.079390705386722216e-01 1.557250606379207980e-01 | |||
6.894495333611394550e-02 6.130594376948936830e-01 2.315876000332821594e+00 | |||
1.168590212315750998e-01 4.037889878947520383e-02 3.152764638159926136e-01 | |||
3.147609473561659987e-01 8.448606103547418034e-02 3.681880790089999689e-01 | |||
1.905475300857236709e-01 5.576451121753612794e-01 1.735782833118906243e+00 | |||
8.174670994045084571e-01 7.522671285033422084e-02 2.910705593068040886e-01 | |||
2.930085957594632529e-01 9.567630851274011583e-01 3.172637074347888664e+00 | |||
7.861908323714857927e-01 1.787212591142121054e-01 8.352226099770573287e-01 | |||
8.722257250051403954e-01 8.081061274551285534e-01 2.732557324289121681e+00 | |||
6.899075115902838506e-01 9.443001359106594217e-01 3.398727077247677641e+00 | |||
6.031118963945732991e-01 3.453224833949740669e-01 1.425321208931602079e+00 | |||
3.407956303640705675e-01 9.140476767703493710e-01 3.401458356178787668e+00 | |||
1.945621485096249437e-01 5.923940054625942420e-01 2.566378888181803930e+00 | |||
3.161255927431180224e-01 7.527318319996485485e-01 1.902379634338783321e+00 | |||
9.542504369658332264e-01 7.563093485068556054e-01 1.960997642569395571e+00 | |||
7.503283219779900426e-01 5.828935470434386534e-01 2.446196965775033760e+00 | |||
4.578297753833164840e-01 3.519832708438854763e-01 8.666562129287116445e-01 | |||
6.068951551160934876e-01 3.577481854345385681e-01 1.279663020567587850e+00 | |||
9.286620665777755024e-01 1.989483889154278762e-01 4.980914890658610372e-01 | |||
9.089161333377616669e-01 9.360413652931209860e-01 2.729168699217892513e+00 | |||
2.715020470435951516e-01 6.263210538722676635e-01 1.953520274587866812e+00 | |||
4.484390918011750271e-01 6.187772636825238370e-01 2.289293295258222116e+00 | |||
6.530080949979680804e-01 3.122995150457112956e-01 1.618902454343966735e-01 | |||
2.463316498886304329e-01 5.226564427863324980e-01 1.229133364816173302e+00 | |||
4.141071607560687928e-01 4.798880667179711290e-01 1.484116980207792924e+00 | |||
3.744430435917297517e-01 3.136728207944586844e-01 1.394232745042316868e+00 | |||
9.704834640778647881e-03 2.409452879945052084e-01 1.099945707772545633e+00 | |||
1.247533430143018540e-01 9.278300040342389865e-01 1.969712680896271717e+00 | |||
9.639914914830710613e-01 7.146319051580746207e-01 1.916158759622974328e+00 | |||
9.697007427592747586e-01 4.344846988263826315e-01 1.529407530737709742e+00 | |||
6.432789358766941579e-01 1.159531498655849147e-01 8.070333907812549112e-02 | |||
7.954444389320014519e-02 7.857091264773297112e-03 -3.041162080678399410e-02 | |||
2.056292786003366402e-02 9.139983937786330115e-01 2.283253682769141424e+00 | |||
6.896932244254023736e-01 4.071345432110594631e-01 1.608724082101808062e+00 | |||
1.179590669697371252e-01 2.516637324101815842e-01 6.254825903210164961e-01 | |||
5.717954038454533539e-01 7.118334045214347316e-02 4.939509993242007790e-01 | |||
7.652646562234205474e-01 3.355166863650271836e-01 6.348712599175490023e-01 | |||
8.211691611012003067e-01 8.338493619363586129e-01 2.737468720090254326e+00 | |||
2.169688389528605654e-01 3.278673542269121555e-01 1.327212744232495689e+00 | |||
8.825445819422383931e-01 3.988053665091583211e-01 1.838918711522194549e+00 | |||
7.630053686338290619e-01 7.118875637069269624e-01 2.036324433274867118e+00 | |||
8.682088181253480030e-01 8.022112043828562022e-01 2.771127215122880294e+00 | |||
1.617129307945597283e-01 3.235671693450348219e-02 2.576392368990635950e-01 | |||
2.998997587073576332e-01 9.881916976518770879e-01 3.636341806624223238e+00 | |||
4.166599629826496232e-01 4.948038506801873959e-01 1.604337709904139819e+00 | |||
5.346101161310027328e-01 3.346227769794102302e-01 1.407399360732258575e+00 | |||
5.586469090878520838e-01 9.291034722693523040e-01 2.834402154732535095e+00 | |||
7.495713414978937283e-01 2.230812889919575603e-02 -2.557682884596298445e-01 | |||
1.776544927826729792e-01 8.330048151716381577e-01 3.165953386815831827e+00 | |||
2.370892922909140221e-01 9.645650999746180343e-01 2.848768860709277373e+00 | |||
6.461924835393765276e-01 9.880609977186249893e-02 1.668412393504021818e-01 | |||
1.553521768486154642e-02 6.646524478743683906e-01 2.595752388284229717e+00 | |||
2.304588954137405388e-01 6.865243600025272563e-02 5.432053718528367936e-01 | |||
2.895677903304353373e-01 6.671957989519927690e-02 1.011850249507597832e+00 | |||
1.509853317223704972e-01 4.906855357405467410e-01 1.149671642059096444e+00 | |||
2.695189180420017871e-01 6.034853329583748094e-01 1.407443348919456216e+00 | |||
3.459764986793533170e-01 2.784403907717262205e-01 1.269648748329526677e+00 | |||
6.512027084577440839e-01 8.181789021711619592e-01 2.478800034415616693e+00 | |||
3.889098625215544613e-01 3.863808815073727088e-01 1.725517777318716028e+00 | |||
1.566168732205782677e-01 5.040095278958177660e-01 1.445050834954967778e+00 | |||
1.506836183177537869e-01 7.794054440250003379e-01 1.827809424017371231e+00 | |||
6.008472067370227432e-01 6.576294941390324889e-01 2.840072456764028530e+00 | |||
3.529786254613809238e-02 7.906953468826860565e-02 2.914537265792652709e-01 | |||
5.838745443766220067e-01 4.772899699307178167e-01 1.562650054560695834e+00 | |||
1.146386622596683447e-01 9.203557773271238140e-01 2.689497621397455962e+00 | |||
7.527449308386525706e-01 6.556932866566937967e-01 1.353679760380875008e+00 | |||
5.772945696666064164e-01 9.877894057653435755e-02 7.237928965909140588e-01 | |||
8.474621057352611508e-01 4.665830019054897360e-01 1.901384774582225878e+00 | |||
4.727060331868276899e-01 3.727054816128867465e-01 1.321924051574254966e+00 | |||
5.098999975310586352e-01 9.609943282665764919e-01 3.645184030881743364e+00 | |||
6.906392732383714872e-02 6.549105132437355037e-01 3.089488200682487751e+00 | |||
7.823921352054278300e-01 9.097819054222342494e-01 2.846727756943917775e+00 | |||
9.943033096890919742e-01 8.444706344914977292e-01 3.381869668356601633e+00 | |||
5.178662245948807419e-01 4.365645138590940055e-01 1.617148994110577664e+00 | |||
9.534817465122435998e-02 1.230465622023014793e-01 5.958602476658144687e-01 | |||
9.136055467588631851e-01 9.813729037345580997e-01 2.801133207970882388e+00 | |||
8.333185088873177326e-01 3.682929206749316897e-01 1.581381594818430436e+00 | |||
4.771459736855861999e-01 5.740580398746287782e-01 1.045186811835778551e+00 | |||
3.367532669053258099e-01 9.863295921596789695e-01 3.324781710758984765e+00 | |||
1.594746085675310399e-01 1.159932473938930508e-01 1.169134788908752665e+00 | |||
1.194465875871124627e-01 7.888348212226486966e-01 1.975195364474281234e+00 | |||
9.220174949309249968e-01 1.121314629070164770e-01 3.351474538644472201e-01 | |||
2.541061042050157814e-01 9.685797070542766507e-01 2.153441410522853872e+00 | |||
8.307572662553505127e-01 2.508448740688680179e-01 5.784611982958615117e-01 | |||
1.376235448843345921e-01 3.617318129659841119e-02 6.221258106163279900e-03 | |||
1.840436481673137781e-01 6.808323222169596622e-01 1.882607549485687315e+00 | |||
6.342511202490156430e-01 6.365513878416173554e-02 4.041269791780713883e-01 | |||
6.123378869386941759e-01 8.101128409854028156e-01 2.459317063345435983e+00 | |||
1.397822621492095241e-02 6.212810177816235901e-01 2.486015825552446934e+00 | |||
2.648893147818288663e-01 7.721405542346180262e-01 2.969950102138114278e+00 | |||
4.355738705629145224e-01 5.015560297114354249e-01 1.631130975396188676e+00 | |||
6.767645550708526470e-01 6.719988217135804964e-01 2.402001209385296487e+00 | |||
8.969482811011396661e-01 6.216162704899576008e-01 2.391939545978828185e+00 | |||
2.807005865638156905e-01 8.765866332782691339e-01 2.445553814010947402e+00 | |||
8.776631729638195711e-01 4.409557801676966537e-02 3.437035580118271705e-01 | |||
1.132318932995939331e-01 3.878734620588315529e-01 1.126828874551153659e+00 | |||
6.505608066743523432e-01 5.570322166309906731e-01 1.405162532082802507e+00 | |||
6.352826777968835215e-01 9.032073994924333382e-01 2.875864603770365679e+00 | |||
6.075075314259952286e-01 9.988317036228566703e-02 2.788370701672525653e-01 | |||
5.520087431768604391e-01 9.498021697608852731e-01 2.877145609157186534e+00 | |||
3.487420901728126577e-01 3.894438219111665545e-01 1.967127108295036342e+00 | |||
9.888404807347624947e-01 9.617904325249965192e-01 2.959924223051830339e+00 | |||
4.969393094569252156e-01 7.919237167339300276e-01 3.236077804420792958e+00 | |||
4.616402216694177607e-01 7.207236959796187170e-01 2.939117327649773515e+00 | |||
3.099152128633931857e-01 8.613633170191453692e-01 3.079330776691408644e+00 | |||
7.086847469763453145e-01 8.869181625632798216e-01 2.871147840108290161e+00 | |||
5.377941856756937034e-01 6.587775309256123091e-01 2.160967816912532591e+00 | |||
7.646364267701427631e-01 2.455421095608366633e-01 8.572966621828197464e-01 | |||
8.622835239476616120e-01 3.000231196956705260e-01 1.259991882594763801e+00 | |||
3.345859103381273458e-01 6.329893531710280019e-01 2.681060716320600612e+00 | |||
9.934007225599694735e-01 4.543513071834827111e-01 1.315428869334155149e+00 | |||
5.759080950066908233e-02 2.884858963551498734e-01 -7.850174925890418542e-02 | |||
4.269505081601583374e-01 8.051743259836636746e-01 1.978542162816029570e+00 | |||
6.524171982482834808e-01 9.279905907169705426e-01 3.060068499975705070e+00 | |||
6.734843198314031110e-01 6.334571393198266120e-01 1.336145401266799482e+00 | |||
3.575328041680904123e-01 3.986721918236949547e-01 7.723321667448483918e-01 | |||
9.403819198182535866e-01 6.083547482391497008e-01 2.086252971355057717e+00 | |||
8.261928456459203263e-01 7.570886069856381795e-01 2.281536649939948092e+00 | |||
9.971820442967808118e-01 7.992046415584097563e-01 2.704741105823545322e+00 | |||
5.172650976969177528e-01 3.724498668637581389e-01 1.673405379121174485e+00 | |||
8.705251688169839408e-01 2.588070565460098837e-02 2.202940496938309289e-01 | |||
1.833966134012716687e-01 4.876258081198566741e-01 1.679427029342003275e+00 | |||
1.513744990398393497e-01 1.073838804974480698e-01 5.451570316432030605e-02 | |||
4.189926345521661766e-01 8.098320316679730180e-02 8.531055954522728801e-02 | |||
7.543787800059154058e-02 9.848391042115073324e-01 3.320126219861897710e+00 | |||
9.443531958206554666e-01 3.626975445534534881e-01 1.522215910344035361e+00 | |||
9.301133473836653565e-03 7.468683831400360074e-01 2.334020525485814623e+00 | |||
9.642102501462823660e-02 9.376298281258887979e-01 2.880496019421884935e+00 | |||
8.756395279972406520e-03 3.716757697309374109e-01 1.208645232007073744e+00 | |||
4.687172502852384737e-01 3.820892126647807974e-01 1.854532680014708834e+00 | |||
5.913476236001140585e-01 2.189669997631138454e-01 1.012501326332066132e+00 | |||
7.387636329667006674e-01 5.920302587231973668e-01 2.412974575569116098e+00 | |||
8.630538270295081693e-01 7.792900613671284171e-01 1.878971948852477647e+00 | |||
3.338134227415587851e-01 5.674753975143114193e-01 1.695213821857337599e+00 | |||
9.584543540591695665e-01 5.927514450250956912e-01 1.370840158264928110e+00 | |||
6.241369121140725174e-01 6.771593195494135520e-01 2.488457865425464988e+00 | |||
9.371467607092121010e-02 5.101183721846589414e-01 1.744021612594066850e+00 | |||
2.493145976044208734e-02 8.668047761071785429e-01 2.752694079365073154e+00 | |||
4.464666703723130237e-02 6.965892190700817554e-02 3.990281308246132475e-02 | |||
5.023918872272138403e-01 2.040379432923519198e-01 5.516673767989469512e-01 | |||
5.877438415473072997e-01 6.615594275407946645e-01 2.239059703919524136e+00 | |||
2.542344007676432538e-01 2.107911890480862471e-02 -1.814458337391371134e-01 | |||
2.605715458050029687e-01 3.705245057473609549e-01 1.623875658479395634e+00 | |||
3.079127212378973688e-02 5.645134485765185284e-01 1.396775220088774416e+00 | |||
2.935412713526179829e-01 6.360212413725220282e-01 1.784000484477348714e+00 | |||
6.921359345671137531e-01 4.471545089601747236e-02 6.832107745984026259e-01 | |||
6.008873041498907774e-01 7.245640966372686176e-01 2.449924662944541254e+00 | |||
4.188308403428122606e-01 8.643009544846647074e-01 2.760272756702002894e+00 | |||
7.900049678498655137e-01 5.336134216172517819e-01 2.017342915403988357e+00 | |||
8.916015312642323609e-01 5.935368189827650021e-02 6.154584039205877044e-01 | |||
6.886796487300256286e-01 7.351803922338089325e-01 1.955426791912523221e+00 | |||
4.097948721282183016e-01 8.938378872878225412e-01 3.119817310501979524e+00 | |||
7.971993055816052332e-01 2.098045641727298571e-01 1.283885910722696089e+00 | |||
2.983937173575484936e-01 5.264553012450376990e-01 1.531566251661298006e+00 | |||
7.088314414946647046e-01 8.127418738639392570e-01 2.358159220704926007e+00 | |||
2.794745935386844238e-02 4.257614762297438071e-01 2.267702959680207631e+00 | |||
1.442114455984099264e-01 8.172152263897196622e-01 2.893849234749763077e+00 | |||
5.483618677638720795e-01 5.176509315932831257e-04 1.301219352870777612e+00 | |||
6.427378960850457235e-01 7.552474603789007812e-01 2.472390033497665840e+00 | |||
9.012632286266673010e-01 3.977661209204516046e-01 1.584245481446183046e+00 | |||
7.751416839326413788e-01 6.567156244957739197e-01 2.375680585799090139e+00 | |||
7.881789859035825963e-02 1.398809975130443917e-02 1.062458766386323861e+00 | |||
7.285305002856791168e-01 7.524358560494234238e-01 1.777475646463399617e+00 | |||
2.379830689652752129e-01 8.531137154968296832e-01 3.784977853443068696e+00 | |||
6.570932175321538837e-01 6.318373986277122656e-01 2.448493198991143593e+00 | |||
1.769712777161163153e-01 4.881231080553715129e-01 1.218194871634332488e+00 | |||
9.787091902618055839e-01 7.234517063722307739e-01 2.078608673775485638e+00 | |||
5.325543940594856807e-01 3.878463636417355254e-01 1.117218762232110807e+00 | |||
3.776294030850475547e-01 2.938102869436330922e-01 1.323709562131879736e+00 | |||
4.523604325372018176e-01 1.784904810357289540e-01 4.552447289805042763e-01 | |||
1.181792173078843877e-02 8.635937518630699694e-01 3.410988741402070090e+00 | |||
7.034518033946576265e-01 3.367757382672672550e-01 8.910639855322948222e-01 | |||
8.948050446054094387e-01 8.843895733410821469e-01 3.156833444330739091e+00 | |||
6.259623697221685656e-01 5.704639272128342320e-02 1.958388498885558493e-01 | |||
5.235755207298292691e-01 3.340819322875727693e-01 6.268974481184578229e-01 | |||
8.638754732141373793e-02 2.774203914755931644e-01 2.935415055568650899e-01 | |||
4.317114149216787489e-01 4.650713813752526127e-01 1.995060795329805625e+00 | |||
8.917705774149951026e-01 1.697700091575871095e-01 5.572552195849169721e-01 | |||
1.752132038065331576e-01 4.061422078489875798e-01 2.293701656882313422e+00 | |||
2.928100157108124435e-01 4.671233284818390175e-01 1.080626851389180132e+00 | |||
7.819621667662703945e-01 8.008189799725141667e-01 3.080348037192132793e+00 | |||
3.690479178198524668e-02 4.992162633659427140e-01 1.440659064292583391e+00 | |||
4.255218965268586784e-01 4.299491437783846681e-01 1.718854994552853732e+00 | |||
6.508378905405213244e-01 5.125476346950736950e-01 1.590678829535723260e+00 | |||
5.072290338100746698e-01 1.864644592367400433e-01 4.459607970314258818e-01 | |||
4.276686579201467442e-01 2.301907304104228169e-01 9.303368310914639228e-01 | |||
3.476699595249291841e-01 8.611313302243546186e-01 3.033742925183159134e+00 | |||
5.490920356870296892e-01 2.171881655774017261e-01 4.104681509594124700e-01 | |||
7.510282078843535380e-01 4.979744383443573952e-01 1.696128527266623331e+00 | |||
3.906216874866195177e-01 1.165731551035269487e-01 1.438532496121395221e-01 | |||
6.050298171092189925e-01 5.186689934233152943e-01 1.479263961155669271e+00 | |||
1.317855929821913907e-01 8.963483353857124580e-01 3.139096945167396679e+00 | |||
1.652283583767804043e-01 5.119575423296794936e-01 8.744003971758654847e-01 | |||
3.282157425301530651e-01 4.670372656905332764e-01 1.516522852725733284e+00 | |||
5.574891210005790132e-01 6.653447637772980094e-02 7.154585981647222370e-01 | |||
7.897971783219093211e-02 9.666951250533402096e-01 2.778803608643247713e+00 | |||
5.317142691264395982e-01 8.525494190186935217e-01 2.189194935042349410e+00 | |||
9.139282861202384645e-01 8.300394780564672725e-01 2.314393252321646166e+00 | |||
7.003936096648044618e-02 3.614369733524737116e-01 1.458722832872220376e+00 | |||
3.329160644636194322e-01 7.612584046270625127e-01 1.728766841894803008e+00 | |||
7.223716278606399088e-01 3.424502960158398723e-01 1.475182310351810244e+00 | |||
7.579486958675392128e-01 1.404431732978919101e-01 7.638063148058644725e-01 | |||
4.879941143072841214e-01 8.776430610393899379e-01 2.590578637109576032e+00 | |||
5.059776587492628153e-01 6.011431791188193152e-01 1.727646528689702032e+00 | |||
1.235219504874963681e-01 1.179922675331297466e-01 2.158455983551904600e-02 | |||
7.725252796637381847e-01 2.402553529530193632e-01 6.088324063553911536e-01 | |||
6.044899109013432970e-01 3.526318875917974216e-01 9.820642697335023596e-01 | |||
2.788842141255436502e-01 9.449528134942402069e-01 2.637117406174539269e+00 | |||
8.124980818899407708e-01 9.135864179859349843e-01 2.976694424729694699e+00 | |||
1.430100926327314559e-01 3.895334755875521360e-01 1.484207112180311761e+00 | |||
1.346184020583984786e-02 9.470172860355422495e-01 2.018323715053156331e+00 | |||
5.315817357323743275e-01 7.364967952142569274e-01 1.340839329042298633e+00 | |||
4.307840966796572246e-02 2.388285466023543080e-02 -1.547847363833975143e-01 | |||
1.108508058799980711e-01 6.309926088062709493e-01 1.846453177067780338e+00 | |||
1.369271739615056660e-01 9.521833918572530919e-02 -1.367807803943321687e-02 | |||
8.709137663984527489e-01 5.830997774574926407e-01 1.470710178479174957e+00 | |||
5.120154121503293077e-01 4.166566820874566535e-01 7.946903526350351799e-01 | |||
1.315315743704883467e-01 6.371999314248233404e-01 2.694171092891537000e+00 | |||
2.153395548126788572e-01 8.908231491211437980e-01 2.548016980999377346e+00 | |||
6.774899744365372456e-01 1.475000211999840660e-01 1.016389505906764912e+00 | |||
7.205521389499305318e-01 5.167944263926570470e-01 1.439864432386541271e+00 | |||
6.298537683329488157e-01 1.103889021762568534e-01 5.788912312050239484e-01 | |||
3.003320757630068005e-01 2.839265518953392187e-01 7.902651533713537235e-01 | |||
1.358787769867572104e-01 5.338502558571581735e-01 1.242159045055135191e+00 | |||
7.211422730895343802e-01 6.832532426805910664e-01 2.427868407009705631e+00 | |||
5.077081666456587650e-01 2.692338638244742288e-01 9.469604095963046753e-01 | |||
9.076515738388084564e-01 2.492821227198076528e-01 3.774681688921412981e-01 | |||
3.057001659291251894e-01 3.716474170584588288e-01 1.981864586654419114e+00 | |||
2.393124933107569907e-01 7.217250161625320404e-01 2.590482589440039618e+00 | |||
2.651245410444329487e-01 3.496052218356904273e-01 1.227039102958267680e+00 | |||
2.431967384773501273e-01 9.985048262565618948e-01 3.235888446285136943e+00 | |||
2.698483691693887021e-02 9.743108744110025032e-01 3.342329366195278428e+00 | |||
2.279608585095488360e-01 8.859818045797949182e-01 2.942464448708598912e+00 | |||
4.483609672705520799e-01 6.670998887972758862e-01 2.076419722111321420e+00 | |||
8.904753906462610225e-01 3.111374055960492990e-01 1.456989638885515248e+00 | |||
1.514761014203253042e-01 1.678783474502699713e-01 1.167981405462522382e+00 | |||
7.520504570944019918e-01 4.375530060069278138e-01 1.481577723376456435e+00 | |||
2.397778092861350130e-01 7.997618535980651755e-01 2.342757484666917911e+00 | |||
2.531206670418394422e-01 4.615506965402542683e-01 1.636592790074103743e+00 | |||
7.435728910834795702e-01 3.736777422149617500e-03 -3.830974855301556015e-01 | |||
3.688643214472042686e-01 1.883043946844410454e-01 5.667628887874931465e-01 | |||
2.618105734985738398e-01 7.634161407858581772e-01 2.196219235036334094e+00 | |||
9.804012704778609644e-01 6.778374519788783470e-01 2.062332573557930537e+00 | |||
4.626539226850950870e-01 2.165404571632910669e-01 6.080610405596087453e-01 | |||
8.806516367168518711e-01 8.681734757726988772e-01 3.521148834689036278e+00 | |||
9.023441618521538254e-01 3.364042065683817695e-01 6.872849186385355802e-01 | |||
4.269233628358508614e-01 8.040080415256076751e-01 2.145076436418656751e+00 | |||
4.600117779487580272e-01 3.367630808640548201e-01 5.949168512934720487e-01 | |||
1.807768509788476585e-03 2.420663221130627818e-01 7.130138029462870231e-01 | |||
4.099820340838236099e-01 7.899034529768680368e-01 2.385191132276404335e+00 | |||
4.849960694889798862e-02 1.982840001315443468e-01 6.301584230118260077e-01 | |||
8.996124640016156526e-01 7.420756892900272073e-01 2.209456858526119838e+00 | |||
1.492016935091921015e-01 2.147096921654393364e-01 1.314425927159785124e+00 | |||
1.361244278677032904e-01 1.899184959262427697e-01 6.815115507105167225e-01 | |||
5.070936213629948508e-01 8.151840969923447311e-01 2.572251842372536235e+00 | |||
4.659745595556220588e-01 9.428226134576906947e-01 3.071537444706702757e+00 | |||
9.429245506623894491e-01 6.629318320075742177e-01 2.498955264337161974e+00 | |||
1.802342851054937745e-01 3.490106428957023832e-01 9.240467110042074328e-01 | |||
8.098958385965702167e-01 1.226346777314701875e-02 -3.309643649656411890e-01 | |||
5.417547087050482713e-01 6.658204838297733241e-01 1.509700168401965970e+00 | |||
9.456770645166309164e-01 3.030826325980775504e-01 1.073362218932768508e+00 | |||
4.844072092975458821e-01 7.530251506190279720e-01 2.253037890330522153e+00 | |||
1.874829847214434242e-01 1.996541089309411543e-02 -3.827100215589169796e-01 | |||
5.894178411794452899e-01 4.522767998056613781e-01 9.605762030309467070e-01 | |||
7.873570305224111276e-01 6.561221564940420414e-01 2.147138860156095674e+00 | |||
1.426527504041810168e-01 9.867494222735424536e-01 3.020454741531790166e+00 | |||
9.391927900435999010e-01 6.852766761662199002e-01 2.251124587022512724e+00 | |||
2.368729390231125720e-01 8.279672581331573022e-01 2.296374797803339085e+00 | |||
7.486337179086945959e-01 6.735218106509133218e-02 -4.227848779936776991e-02 | |||
8.866159120116020587e-01 2.321480186461521988e-02 4.525468707059353557e-01 | |||
8.845205620098687271e-01 8.477203631758970515e-01 2.299410628329696671e+00 | |||
1.569325367885284761e-01 5.465081367442979898e-01 9.567354628390964955e-01 | |||
4.175773650674116100e-01 7.437289433230617197e-01 2.075641144696740881e+00 | |||
6.766508167025210296e-01 1.401104088860956054e-01 8.133522740106857718e-02 | |||
2.437293122123680433e-01 1.741358823378642562e-01 9.869212503030849737e-02 | |||
5.354261087357714555e-02 5.238690518893841519e-01 1.827060912740871679e+00 | |||
8.985079867885620297e-01 4.541127439489960338e-01 1.284689462189573028e+00 | |||
9.859180124719979510e-02 2.425293585860593959e-01 1.794219286375581479e+00 | |||
3.104681315904065375e-01 4.366134028470973760e-01 9.124668454612738788e-01 | |||
2.914313030904192026e-02 3.635290659195751228e-01 6.599022168391472931e-01 | |||
3.850158813867096130e-01 1.568035139584514903e-01 4.341784148718722536e-01 | |||
9.497671415857267263e-02 8.881066740374826685e-01 1.856075846989231337e+00 | |||
4.979692524704951717e-01 3.101180946379102910e-01 1.361135155764561899e+00 | |||
6.715576315226775561e-01 6.363144202920870685e-01 1.740954301332964027e+00 | |||
2.471059521149722160e-01 9.509378395509832105e-01 3.021130562435957678e+00 | |||
5.031370421739999044e-01 3.645016208974644067e-01 1.768883080217330317e+00 | |||
9.759302242105675207e-01 1.773781471130541076e-01 9.632829436705716564e-01 | |||
9.453444859710471437e-01 9.036040116608030637e-01 2.750251783594621191e+00 | |||
4.426946929417193699e-01 8.918920263333366405e-01 2.828866422749941378e+00 | |||
9.111891321942485744e-01 3.253675627557718331e-01 1.231973602741280427e+00 | |||
4.970636523091295222e-01 3.228304762823941410e-02 5.036444899974941158e-01 | |||
3.193557603678188750e-01 3.889202362317200068e-01 1.004191383201526344e+00 | |||
3.485843550016007875e-01 8.262768964751382450e-01 2.565665132595759346e+00 | |||
5.833289580151596043e-01 1.596871640559106575e-01 7.812570651796820753e-01 | |||
4.402007532660496825e-01 5.064557025928083656e-01 1.774856594303265922e+00 | |||
5.628722233494043437e-01 9.995482018959611636e-03 -2.601902637070271918e-01 | |||
2.950397190587025209e-01 1.288467888888046309e-01 7.000016542288067800e-01 | |||
4.125472898302278146e-01 5.288952843785157398e-01 1.681793552162739180e+00 | |||
8.269697309788622830e-01 2.497309294178413630e-01 9.397450583987179140e-01 | |||
5.574931907283097177e-01 3.858638212528101574e-01 1.180284844401800814e+00 | |||
2.215280295953003797e-01 6.958626403994585541e-01 1.731253414679733815e+00 | |||
7.510261583555487563e-01 1.788188634455023518e-01 1.299925429501493923e+00 | |||
2.635068227062731250e-01 5.956180221311812018e-01 1.595594858207789235e+00 | |||
5.746838584203813882e-01 6.007903034197219494e-01 2.482844353550380578e+00 | |||
5.740375370137625888e-01 3.261523485439853376e-01 7.240137689199145354e-01 | |||
8.724587612099971023e-01 8.990685917081793210e-01 1.735138035221868380e+00 | |||
5.785830373398077597e-01 6.128477676944743546e-01 1.987830964497920849e+00 | |||
3.085802115660345457e-01 5.502498020075829999e-01 1.973949401517510527e+00 | |||
5.162006561767278345e-01 3.647541951345616429e-01 7.132521642174485255e-01 | |||
8.616414380258615724e-01 3.072826265633465948e-01 1.136706872178240069e+00 | |||
3.367459218196163784e-01 7.189389091626905426e-01 2.520265143634899729e+00 | |||
9.396466651065726872e-02 6.951274172570279797e-01 1.824043186306287589e+00 | |||
1.065124086653937985e-01 5.595047494642528818e-01 1.298440379292570501e+00 | |||
1.381754126276079075e-01 2.549005463370730418e-01 1.155870242740047127e+00 | |||
1.253495284869140525e-01 9.311165344688681067e-01 3.488381325091368446e+00 | |||
9.178938763431406800e-01 5.723086887159402059e-01 2.382892900012446802e+00 | |||
7.714372705401217889e-01 5.399780067964872199e-01 1.248773051140989132e+00 | |||
5.804936310782647935e-01 1.589546100553630437e-01 9.328016607099975932e-02 | |||
4.427299235563127988e-01 7.233402821966863350e-01 2.006316910364275508e+00 | |||
9.348250418258826633e-01 9.206452652236770673e-01 2.723270530799336786e+00 | |||
3.916080401109484077e-01 6.031447716605714549e-01 2.610184334768270720e+00 | |||
9.743011306032670626e-01 5.991816078526247535e-01 1.947173168675800481e+00 | |||
9.688051256697772784e-01 2.367515763185765731e-01 1.165564742156855793e+00 | |||
7.373234204899167255e-01 7.897627365541004529e-02 -4.823097352893258027e-01 | |||
6.319097362953710606e-01 7.778166682524637610e-01 3.200794474196090977e+00 | |||
8.796209364071548853e-01 3.472706073402113125e-01 1.250685385265472238e+00 | |||
9.980931791489400240e-03 5.344631825592538465e-01 2.151465895779983484e+00 | |||
1.037284470534557412e-01 6.808117120647625420e-01 2.051178607733819437e+00 | |||
7.160352138637519070e-01 5.160321394322857635e-01 2.134852334773936100e+00 | |||
4.220681228603470059e-01 2.297364297620837492e-01 6.227097790945155253e-01 |
@@ -1,500 +0,0 @@ | |||
8.902553502043442091e-01 7.193530285868554586e-03 4.722861935711917702e+00 | |||
5.187697179813973714e-01 6.626770873501095505e-01 2.117691187792796192e+00 | |||
6.324427967744761236e-01 4.150316424384996594e-01 2.702997323313629963e+00 | |||
5.340072258966490626e-01 7.297102861553304010e-01 2.848763845110981130e+00 | |||
7.562907835710708238e-02 3.484983252202819015e-01 5.146583752179294535e-01 | |||
6.459029351260444640e-01 1.788244889218169043e-01 2.483893007426155375e+00 | |||
1.388942812241027447e-01 7.757611799635943939e-01 8.890859526034898819e-01 | |||
5.376807862498653723e-01 6.867758089571460722e-01 2.784178074435446071e+00 | |||
7.935116785508896653e-01 4.153415179689987635e-01 4.285964429548942967e+00 | |||
1.253406552553094633e-01 6.240312870134454570e-01 7.336055984814593423e-01 | |||
2.151430327831104217e-01 8.613651676712985594e-01 7.756012896092056019e-01 | |||
5.991670323278381893e-01 2.579790094794917055e-01 2.761184029097501380e+00 | |||
6.977054736867238649e-02 7.383416443956168917e-01 7.741753201883817548e-01 | |||
9.587867685558741915e-01 2.751722383328303323e-02 4.905116787264796940e+00 | |||
2.817130961862280847e-01 3.875394124534615736e-01 1.252643285526846562e+00 | |||
7.237968908416224645e-01 3.861207672558994997e-01 3.225284852878170128e+00 | |||
2.583450592711957761e-01 3.719396745654124370e-01 1.656527019070396234e+00 | |||
8.765418000733125048e-01 2.969654372399725961e-01 4.737000161453560843e+00 | |||
2.530576672013497008e-01 2.907461634243216908e-01 8.456199138660842607e-01 | |||
8.104234330079270743e-01 2.912753898366352567e-01 3.560448387613745158e+00 | |||
3.132662218424453471e-01 8.104371841944167931e-01 2.379701935140821067e+00 | |||
5.658656150123088091e-01 3.817405960024843337e-01 2.783999692048556795e+00 | |||
9.320637820750818925e-01 2.055182814725818874e-01 4.620336288127786517e+00 | |||
6.968573654044996069e-01 2.551101782246331107e-01 3.703069846077785332e+00 | |||
5.708332849426074329e-01 1.685198486129718676e-01 2.654030427193004638e+00 | |||
3.705122845651063201e-01 9.348592942833078290e-02 1.297476213773902654e+00 | |||
7.970337772561465473e-01 9.127383875190620000e-02 4.005564186422864381e+00 | |||
4.620883032895647391e-01 7.090573460005439310e-01 1.811161547965992380e+00 | |||
1.606139397731517482e-01 6.657830134602090721e-01 1.103191166302748938e+00 | |||
1.500945265064396184e-01 3.770079899236062904e-01 1.175213496736928320e+00 | |||
1.879123817299752641e-01 3.239600575201229127e-03 1.281344024824248251e+00 | |||
9.413500161180935466e-01 5.335380202142231409e-01 5.062085445358230906e+00 | |||
5.461756304730255307e-01 8.901292332727767098e-01 3.493524911885176554e+00 | |||
8.081365858242761657e-01 8.100848584189198620e-01 4.117091361112418468e+00 | |||
3.625810857166555934e-02 4.741567524987179150e-01 3.201028028192264840e-01 | |||
1.739556227654529552e-01 7.803527316212930387e-01 6.499576998325453658e-01 | |||
9.118433002076955507e-01 6.407675739336321508e-01 5.042263269352003086e+00 | |||
9.998784077161502193e-01 7.483976874124339540e-01 5.558845672463395005e+00 | |||
1.406330017950156996e-01 8.131089126127777345e-01 8.529366417981125714e-01 | |||
7.427175211937184773e-01 1.786391147649002420e-04 4.025158756424569440e+00 | |||
8.071400872932689152e-01 3.301877988765022920e-01 4.274905913060541351e+00 | |||
6.132877578731208423e-01 1.388406393319641108e-02 3.188711023622514684e+00 | |||
5.057427384266398596e-01 8.405641769019488452e-01 2.349412155304785976e+00 | |||
2.031797303967974866e-02 4.719344647695818962e-01 3.813620389379724918e-01 | |||
4.736517272410867285e-02 1.534027798909988194e-01 3.593867733048692337e-01 | |||
4.776335617360186747e-01 4.987151799687639109e-02 1.830409779965475714e+00 | |||
1.976771659235542211e-01 8.102491563458151802e-01 1.465082449387908214e+00 | |||
8.945277437407285204e-01 5.392442740263292400e-03 3.947977295918374985e+00 | |||
8.287561257675116755e-01 9.743429561462796373e-01 4.534874261370233306e+00 | |||
1.688380953731634104e-01 4.118891207022779000e-02 1.279418479007899734e+00 | |||
1.448747667435124065e-02 8.478624793678177785e-01 1.227845464298549794e-01 | |||
7.742161855443102647e-01 4.517992793261232931e-01 3.799220840937802990e+00 | |||
6.114291464929603181e-01 7.721238794474678535e-01 2.582738843687039321e+00 | |||
5.332141338542684128e-02 1.811323490623216736e-01 -5.532764692228755443e-01 | |||
9.408886628973710531e-01 1.556565907796476633e-01 4.862013173368588959e+00 | |||
1.867209140598263817e-01 1.414268718940004943e-01 5.030266519738164632e-01 | |||
4.213143040736690992e-01 8.981987503649613291e-01 2.185246694783867127e+00 | |||
9.078690166323477584e-01 2.985901095637112368e-01 4.347341964190630570e+00 | |||
6.110259216367625035e-01 2.740891329508408081e-01 3.112286516279270110e+00 | |||
2.864246944057999844e-01 4.440232541035162850e-01 1.501611354202824433e+00 | |||
7.976000361064515820e-01 9.521214723407201985e-01 4.676176245403160792e+00 | |||
9.395323120171954479e-01 3.682769163024779413e-01 3.837826959829171436e+00 | |||
4.915802305852885468e-01 1.325592992176947149e-01 3.045887408887914205e+00 | |||
6.912135045495396701e-01 9.095021180520060922e-01 3.901784361171318771e+00 | |||
8.365021221710774446e-01 2.330423758527274680e-01 4.692752544016114413e+00 | |||
8.994373430431622518e-01 5.579966443853318081e-01 4.651076766571110355e+00 | |||
8.840047829869890350e-01 7.946302924232317988e-02 4.495470865914159120e+00 | |||
4.870030547843984259e-02 1.130417536936701994e-01 -1.979842822663985258e-01 | |||
2.566144231215066185e-01 8.004514469172895330e-01 1.846141508114071073e+00 | |||
2.939425243328720461e-01 5.546059304307632276e-01 1.672088188764273786e+00 | |||
3.887721728600385118e-01 3.277396764886075786e-01 1.622788204200148465e+00 | |||
2.765333636294917685e-01 9.149313567016718052e-01 1.274653239072587319e+00 | |||
5.173775106636566701e-01 4.149392656687573844e-01 2.246544924727742032e+00 | |||
9.891094556265209725e-01 6.923113664898092878e-01 4.836418038491469673e+00 | |||
3.981472653836684028e-01 4.207151626684387802e-01 2.622178491653474275e+00 | |||
8.827783903913843933e-02 5.398600383984650808e-01 5.854720332511057501e-01 | |||
8.695327093477914771e-01 9.037758084512063084e-01 4.217357887069312916e+00 | |||
9.261141014600784427e-01 8.648792135202629794e-01 4.362203110770590975e+00 | |||
9.612459417162249053e-01 7.050786237077200092e-02 4.819714013999864655e+00 | |||
2.175702969902234551e-01 5.383503237913629391e-01 6.422898183962644225e-01 | |||
1.141591865049933485e-01 3.067751231168288628e-01 9.708775886892598850e-01 | |||
9.272686854457729000e-01 7.215902803912347396e-01 4.912604569584945402e+00 | |||
5.142798403237214266e-02 1.323898218486864176e-01 -2.901656550987544714e-01 | |||
3.477567836222974496e-01 7.258162872275639721e-01 1.499258770144148878e+00 | |||
3.235652288022236034e-01 8.369419714161933088e-01 1.571424800555489609e+00 | |||
7.203814453323311717e-01 4.727506656800010143e-01 3.610536347843415150e+00 | |||
7.960667136507829644e-01 3.151108943578667665e-01 3.752957460338226969e+00 | |||
2.288503628882998520e-01 4.024954251467381949e-01 1.371885357377812920e+00 | |||
4.597628562196365287e-01 8.536134369011283418e-01 2.732575733060945478e+00 | |||
6.347368854302326557e-01 6.357899574546558297e-01 3.034337764857596209e+00 | |||
4.550391785745410145e-01 4.880784332075441823e-01 2.755172191061923570e+00 | |||
1.614857391711226331e-01 4.614475762528154057e-01 1.499379129535866273e+00 | |||
6.643461971390929310e-01 7.954073913580996802e-01 3.531564275970451749e+00 | |||
1.965089225797753691e-01 7.636878482296945991e-01 1.451592487989275648e+00 | |||
1.038971224149078942e-01 3.766159557047549233e-01 -1.836209249054253645e-01 | |||
8.677666738428043702e-01 7.301133471076725057e-01 5.089526743702606382e+00 | |||
2.111611745437268484e-01 6.009916007985899311e-01 1.282591111998210964e+00 | |||
8.165867355173265230e-01 8.691451449107020499e-01 4.150710836202859966e+00 | |||
3.541549885223922445e-01 4.126823205016850737e-01 1.746130910505607536e+00 | |||
2.682537411455077070e-01 5.898466928168949464e-01 1.688764412028580075e+00 | |||
8.679848596269212901e-01 5.608438559562867187e-01 3.835160975999336141e+00 | |||
5.776833370668147394e-01 2.731663343536713251e-01 2.505993440852065657e+00 | |||
8.138648125452317972e-01 3.718807175174709823e-02 4.028457616581778034e+00 | |||
2.282377830458008905e-01 8.730998562571533617e-01 1.516197472423553627e+00 | |||
3.461019532650697617e-01 5.821410153889622352e-01 1.004111206774395804e+00 | |||
3.783886586420059928e-01 8.135627697992697804e-01 2.115310715120200857e+00 | |||
8.534863514661925610e-01 4.892294575400191192e-03 4.447818431098251146e+00 | |||
6.481313117292060166e-01 3.244210515162255781e-01 3.658318972809582625e+00 | |||
4.068552360857179417e-01 1.308692143986797118e-01 1.909640133336931278e+00 | |||
3.470492019114007620e-01 3.248366399862634424e-02 1.229853411392451301e+00 | |||
1.803367765699718239e-01 1.822694920673567642e-01 8.396573726461289411e-01 | |||
5.827091441604549393e-01 9.670590760160453492e-01 2.859802087839336426e+00 | |||
8.866423412642148172e-01 5.889083209276405606e-01 4.575011256779902169e+00 | |||
4.625427554234465832e-01 2.062725334982733472e-01 2.831277898318763420e+00 | |||
7.681228254469802952e-01 3.103104106796082950e-01 4.387633281419768494e+00 | |||
2.526355891537099829e-01 7.617333331965988608e-01 1.114869422139043609e+00 | |||
7.603477278415293750e-01 4.241384240546804962e-01 3.749759653376115054e+00 | |||
6.203407641210935131e-01 7.137261472194282863e-01 3.845803566538283924e+00 | |||
5.460065626420126383e-01 6.961595933740327702e-02 2.607109568577683056e+00 | |||
2.002964571171567121e-01 9.836069085534716594e-01 9.439292093204048051e-01 | |||
4.111579845363828589e-01 4.942143050291614159e-04 2.014214453147192607e+00 | |||
5.645844948925443640e-01 3.598987404824569580e-01 3.443453989379621216e+00 | |||
9.792195162518624318e-01 7.976062421249713319e-01 4.888998610361928066e+00 | |||
3.919576698632000200e-01 8.298716087831260468e-01 2.368435603719456051e+00 | |||
8.273517898490360123e-01 8.978670536590080964e-01 4.394100575666468167e+00 | |||
7.946735660263274381e-01 6.215152458852629680e-01 3.747096732015856801e+00 | |||
5.961957851041439493e-01 2.461206941442136698e-01 3.374475969432942257e+00 | |||
2.859418838668200680e-01 4.253262462006508482e-01 1.236450630546637841e+00 | |||
9.284125059869496877e-02 1.023211363768292692e-01 8.078131097547296413e-01 | |||
9.942014694080807402e-01 4.401475622539808841e-01 4.478781628662761527e+00 | |||
9.660790698231932483e-01 2.157934563130896599e-01 5.139668619561629015e+00 | |||
1.819280091787371934e-01 2.450541968023522976e-02 3.235996722822381821e-01 | |||
6.228574615567123995e-01 3.175805215380118618e-01 3.061935718089351166e+00 | |||
4.106235935497739575e-01 9.932038108499299245e-01 1.527363224316587553e+00 | |||
8.241855016207615403e-01 6.723237130512089710e-01 4.292850669843126354e+00 | |||
4.942330091732183295e-02 9.734433703964097129e-01 8.522371467976199710e-01 | |||
7.928026671381125379e-01 5.377329766501475206e-01 4.269632283568844500e+00 | |||
4.019913504018723938e-01 6.641977194508473792e-01 2.015170053280050499e+00 | |||
6.727365172965377171e-01 9.472694398556646966e-01 3.377089737324597163e+00 | |||
8.471272616565498348e-01 5.881538977075595476e-01 4.392336826747588852e+00 | |||
9.953130974848735102e-01 5.148218292773272253e-01 5.277731189116402710e+00 | |||
6.172057967770711029e-01 5.483888340607580014e-01 3.115421823647429367e+00 | |||
7.667299866969052857e-01 9.708997836540530502e-01 4.815854014705881703e+00 | |||
8.781626806683332509e-01 6.634197225683624577e-02 5.365951496188033687e+00 | |||
4.294200428945138537e-02 6.696048344399376795e-01 -2.275716814686346567e-01 | |||
5.440453006410250758e-01 6.064316601914145899e-01 3.356510781321417003e+00 | |||
1.399761884378478705e-01 3.518806601919229893e-01 3.577695275566450395e-01 | |||
8.617267230190176486e-01 5.817745672790290978e-01 4.380852978567977729e+00 | |||
8.264449990414002301e-02 8.814671293110630801e-01 8.430203107240037408e-01 | |||
3.554315087585453448e-01 1.052810186720425367e-01 1.697177576687770806e+00 | |||
3.434478849249136267e-01 4.693421423115070601e-01 1.634727581041684141e+00 | |||
4.811747232410813480e-01 2.085887053917928613e-01 2.177694796147195966e+00 | |||
7.302922441145309751e-02 5.158455228807825588e-01 -5.309006260347731709e-02 | |||
3.658734359575759321e-01 9.328332684465587832e-01 2.523958506341843044e+00 | |||
3.657999960478349744e-01 4.081630405036833498e-01 2.111385140904039837e+00 | |||
6.443174387789217805e-01 5.491135794316760466e-01 3.366581180589497180e+00 | |||
9.523794802263382264e-01 1.383789433423351367e-01 4.929200293322493209e+00 | |||
4.546913387947764118e-01 9.715774690687235537e-01 1.675694272311694988e+00 | |||
9.968356156113117317e-01 4.849172172090066013e-02 5.499251190168591208e+00 | |||
1.080802422921062211e-01 8.101819144197683897e-01 5.936288997891117081e-01 | |||
9.968880256402596896e-01 6.114623789164482792e-01 5.284518758584743736e+00 | |||
5.632787639975594951e-01 9.298242639666138842e-01 3.107059073342421485e+00 | |||
9.929753879792999616e-01 8.277738257041842296e-01 5.322221163042863523e+00 | |||
2.017873440820810993e-01 3.930041059519143731e-01 1.662831985484818809e+00 | |||
9.988387775047696815e-01 6.406545347041754379e-01 5.024155855036609886e+00 | |||
7.928318114305882292e-01 9.562120145401467042e-01 3.880440662985864986e+00 | |||
8.491150745182682869e-01 3.159146326781765435e-01 4.153138632374417050e+00 | |||
2.538959571592438458e-01 9.152979260463364186e-01 1.425816156136774593e+00 | |||
1.593521171925258439e-01 7.912454811296263379e-01 6.645027624838938385e-01 | |||
4.687429644847865839e-01 6.169269754948423223e-01 2.318841373712335674e+00 | |||
5.917214079785239056e-01 4.910927510555839204e-01 2.613624254253550827e+00 | |||
5.159279263802796223e-01 5.365655117605450108e-02 2.356555547714634269e+00 | |||
6.591863970099155479e-01 6.717663880576866253e-01 3.565002125565192248e+00 | |||
2.353643520705253067e-01 6.117795523900337873e-02 1.053403399203287849e+00 | |||
9.050467773061064980e-01 5.916260158603710018e-01 5.032003229394669930e+00 | |||
5.130829149189519711e-01 7.054695271858724137e-01 3.023972070674382895e+00 | |||
5.946913876118109510e-01 6.759359176617056875e-01 3.668847014476045221e+00 | |||
6.245089498291649388e-01 5.991354645614580532e-01 3.174217498078534483e+00 | |||
9.175513731195615863e-01 2.008496049192866062e-01 4.481042734737560096e+00 | |||
2.627023761581419592e-01 6.756838359786891957e-01 1.962880687582638339e+00 | |||
4.080917878138698995e-01 3.550958257571504850e-01 1.773222645699008559e+00 | |||
6.761975173333450995e-01 7.035843745311997077e-01 3.646215505941157176e+00 | |||
1.213245902031894197e-01 8.304395741316301649e-01 1.428499551425091774e+00 | |||
4.429739754493243753e-01 6.792941621952564013e-01 1.740577045825967062e+00 | |||
8.086469038977305290e-01 2.632471450436880689e-01 3.600809383061207125e+00 | |||
6.793637147200348725e-01 8.130003247485133588e-01 3.578466665421228932e+00 | |||
3.278330562730565889e-01 3.053195962968354715e-01 2.017123323688825742e+00 | |||
5.601724056241701444e-01 9.502574964657128920e-01 3.787256636353711237e+00 | |||
2.812221250012958418e-01 6.256619352802152978e-01 1.354117263602656385e+00 | |||
4.591503551836220165e-01 6.994040558540611796e-01 2.611546123315748424e+00 | |||
1.741841616904893364e-02 1.994594239735637542e-01 2.180155993493790301e-03 | |||
7.947968108101745033e-01 5.809813800307277454e-01 4.108806760165046867e+00 | |||
6.951791123019658292e-01 7.531792040448515024e-03 3.524650861968446680e+00 | |||
8.555011297950287918e-01 6.592057432749437451e-01 4.132370780325799409e+00 | |||
8.561582747701000917e-01 3.718286774329542244e-01 4.512464374466206962e+00 | |||
2.654009300233250990e-01 5.850267012824045265e-01 1.276558037832541403e+00 | |||
2.768625995117022232e-01 3.081426789498873386e-01 1.187842454855385643e+00 | |||
3.941124620889586705e-01 7.509965570437445725e-01 2.507479506634473054e+00 | |||
6.713386114230961432e-01 7.578530303471531226e-02 3.568353984972153725e+00 | |||
7.560506967163682646e-01 1.024499151114731665e-01 3.723865621757185274e+00 | |||
1.806270859597414180e-02 7.109895257280992587e-01 -3.821610113221962646e-03 | |||
5.304541601357893876e-01 5.440232983860595617e-01 2.372159670668812836e+00 | |||
4.083183069007400023e-01 8.747853423214335677e-02 1.282918692687436302e+00 | |||
1.106452590284701110e-01 1.071864798143432607e-01 3.860397439482302628e-01 | |||
1.177711726368863010e-01 9.525478610767630361e-01 7.523116950455114305e-01 | |||
7.032665632909994580e-01 2.710214934413025523e-01 3.381370525555345452e+00 | |||
1.010749651379918568e-01 3.332479537340378162e-01 9.455254057624443709e-01 | |||
1.063975870085455133e-01 6.182755188765054477e-02 4.008501639690126295e-01 | |||
1.496747909510319774e-01 1.796009792503032720e-01 9.225866852352979652e-01 | |||
7.127826032464666950e-01 2.014182235583916736e-03 3.551616246980145775e+00 | |||
1.120445926250559499e-01 7.697964015004733485e-01 4.989883878130709216e-01 | |||
3.480537144688354845e-01 6.064120155352608066e-01 1.700370020689161432e+00 | |||
5.903050320993662448e-01 5.874799979352398616e-01 3.397164990386770889e+00 | |||
2.999418794752777284e-01 4.745115611223454932e-01 9.544355780523869903e-01 | |||
8.161672816912131090e-01 2.376170388099575481e-01 4.245337357280297930e+00 | |||
7.841150676575880940e-01 4.246443328555549179e-02 3.544431586715989013e+00 | |||
1.888468662989832847e-01 1.588353013347186815e-01 1.355757381154146790e+00 | |||
7.833663614903485506e-02 4.261665732398400852e-02 6.109301450525963517e-01 | |||
9.026481770631714641e-01 3.076573390407671971e-01 4.432786615971243016e+00 | |||
2.670964823025856472e-02 4.677940032664674730e-01 -3.801484706112182033e-02 | |||
9.314633957246650775e-01 6.983712531909573862e-01 4.951670684342587947e+00 | |||
5.325406900292606327e-02 3.723933969945265599e-01 4.343157639521054714e-01 | |||
7.344499009926139799e-01 6.992355376368768471e-01 3.846558295644644687e+00 | |||
2.695096457961408953e-01 5.117833729099821038e-01 8.956853997660404465e-01 | |||
1.107737426657140478e-02 7.820856802133314689e-01 3.419652185585412285e-01 | |||
1.266328546384232201e-02 9.737762952534544381e-01 3.208850087732688650e-01 | |||
9.742103929520157246e-01 2.465152312553294811e-01 4.908928922838748932e+00 | |||
9.431512191776294518e-01 5.445640124298737339e-01 4.371405740503699455e+00 | |||
4.685842404870689260e-01 1.940286296095491414e-01 2.960018317863545967e+00 | |||
4.069078704231012811e-01 4.086612490605914250e-01 1.997713291886546783e+00 | |||
8.203293619248421820e-01 4.842311857959981092e-01 4.288152129869915896e+00 | |||
1.655870282244578107e-01 9.263402953726815880e-01 -1.371848231998213929e-01 | |||
8.051375479173161764e-01 5.648739416463132157e-01 3.871323705867104614e+00 | |||
4.958101601140715298e-01 4.550976109219856403e-01 2.342683755862304817e+00 | |||
3.022921738409011239e-01 5.850906911661442056e-01 1.733913846286263771e+00 | |||
8.261508186840310630e-01 5.673235742902436041e-01 4.102463670415864350e+00 | |||
5.050822352032021678e-01 1.688141234528890422e-01 1.991936616609028654e+00 | |||
2.515172464477288816e-01 1.681478490960512939e-01 1.121911802395029190e+00 | |||
5.263810435223783557e-01 8.514030391672555709e-02 2.511142190287897868e+00 | |||
7.740604474300135651e-01 1.710906378902935510e-01 4.069337549008531063e+00 | |||
8.136210720851223543e-01 4.886343242359501016e-01 4.212500435358648154e+00 | |||
4.661160735365488250e-01 7.058440886560580774e-01 3.037085615343088296e+00 | |||
4.242240344439670574e-01 4.237200424056742909e-01 3.328482059929308789e+00 | |||
9.716365604834359404e-01 1.390851225906999389e-01 5.150097438849347675e+00 | |||
1.118418607702701406e-01 8.254801758072631834e-01 7.667690156887928543e-01 | |||
9.040859471999541652e-01 4.915470453646719751e-01 4.760751852537422835e+00 | |||
2.424823162795021192e-01 5.742061383399259533e-01 1.319183405974152956e+00 | |||
3.543391246477066714e-01 6.755905890131740366e-01 1.704998683481461441e+00 | |||
2.135852723294864308e-01 8.904816047166850268e-01 1.442281712505609992e+00 | |||
1.903698799876129000e-01 4.516535191118928871e-01 9.761455706688600964e-01 | |||
3.530153476827597725e-01 5.250488936431743081e-01 1.430971316469179744e+00 | |||
4.277753743395051877e-01 4.628648328438786930e-01 1.878709286548413626e+00 | |||
5.369675195416835356e-01 2.580986218837634238e-01 2.600615011120702480e+00 | |||
2.546915474505969668e-01 7.775789294357958736e-01 9.063075077892018827e-01 | |||
8.729387250977393986e-01 8.663100694350281961e-01 4.798971238209840173e+00 | |||
9.117683396985862831e-01 3.159908020608167556e-01 3.706562475407189527e+00 | |||
7.745796600423543454e-01 4.953520921860106174e-01 3.876554941068059978e+00 | |||
4.177153720805908410e-01 7.170787294717717586e-02 2.269550443272649431e+00 | |||
9.397554111882033823e-01 2.314113511233855114e-01 4.868031352435417958e+00 | |||
6.978906283403238930e-01 6.027190710947856189e-01 3.272065933823683714e+00 | |||
9.867946174870922960e-01 7.554518113060578743e-01 5.158494636624732621e+00 | |||
3.959843884024971672e-02 3.616724225907808066e-01 6.469027443911796738e-01 | |||
9.578756271735796579e-01 3.539823400375810003e-01 5.485371760677108632e+00 | |||
6.152670093561325437e-01 5.115356310380463345e-01 2.754818503780777306e+00 | |||
9.810151687727044489e-01 8.692210052251059249e-02 4.284886585260795577e+00 | |||
2.830662491111537449e-01 7.957951723069504046e-01 1.680928689766612472e+00 | |||
6.034150706678950149e-01 4.126116473219870739e-01 3.784414542578703244e+00 | |||
5.779951770432674163e-01 6.716316996626865432e-01 3.334913580380273856e+00 | |||
6.201337006852551959e-01 4.488529673797322372e-01 3.347552394360280736e+00 | |||
3.476405497810753920e-01 5.062354840316863891e-01 1.983774287074308029e+00 | |||
7.921736986669751790e-01 2.884214658967099165e-02 4.243078657594337777e+00 | |||
1.943616200067197486e-01 4.807304187292147368e-01 1.571193852163393245e+00 | |||
8.452039072298875855e-01 9.667501309157785494e-01 5.063179028495235379e+00 | |||
9.935088542838369507e-01 8.305038283104202446e-01 5.408675447077476939e+00 | |||
7.490785446071297482e-01 9.481095336535100282e-01 4.147884154780753008e+00 | |||
9.325198926247071363e-01 4.952448235763340367e-02 4.747077465701705634e+00 | |||
6.681437517817250260e-01 9.327321958943725067e-01 3.118946774472070693e+00 | |||
9.738751147927149354e-02 2.117023718252546427e-01 -2.727828504450833247e-01 | |||
3.401885988134277783e-01 7.137427129138051995e-01 1.141184534163143027e+00 | |||
9.112289752867522852e-01 9.614208039275407014e-01 5.660917284666649785e+00 | |||
1.703223790937545834e-01 3.280258260817743299e-01 7.837825596359903058e-01 | |||
3.908042827512103745e-02 9.518434040550575181e-01 3.677968104146689399e-02 | |||
3.039489810078243037e-01 5.120114777313169441e-01 9.805833554165650501e-01 | |||
6.380574901082720629e-01 4.867615974021388681e-01 3.109859668341811378e+00 | |||
6.342403097055698824e-01 7.546286985064016850e-01 3.397502497295331381e+00 | |||
5.638698020701464753e-01 8.923442495380846662e-01 2.369266542220243910e+00 | |||
3.575636917065314613e-01 1.547211110349390184e-01 1.292296987015075782e+00 | |||
6.303122673573039458e-01 3.083555354371561030e-01 2.616369001864625954e+00 | |||
5.210461831411086209e-01 6.749954577903509723e-01 1.975368681996582998e+00 | |||
5.942703722928894816e-01 4.871116855642279031e-01 3.718527256418860283e+00 | |||
3.533922613993187856e-01 8.017186938272113927e-01 2.050415758601479066e+00 | |||
7.949563516492000881e-01 7.569940514803505938e-01 4.381287648780601174e+00 | |||
8.501907066323004525e-01 7.393984740440502357e-01 4.919093916773214836e+00 | |||
9.200298009768502761e-01 9.938852717343175103e-01 4.259044777638105295e+00 | |||
8.644560207454676792e-01 9.975330340514554361e-01 4.682193981225292845e+00 | |||
1.088519527208741122e-01 1.963812665577030492e-01 1.070733100207510891e+00 | |||
4.556158466394802931e-01 4.899313216767148260e-01 2.769691407307627529e+00 | |||
2.974315517605302661e-01 8.173500794692589722e-01 1.735941272044910555e+00 | |||
2.396113243268572779e-01 3.906404844484276939e-01 1.406972990200790807e+00 | |||
8.100658765102481951e-01 2.648888092432173380e-01 4.658400777557799799e+00 | |||
4.277788414474006373e-01 4.022644362132115070e-01 2.772478440852416792e+00 | |||
5.822175634400963418e-01 8.206669741395847817e-01 3.404000987263741784e+00 | |||
7.125901857884929802e-01 5.928688878268014717e-01 2.746991602948760658e+00 | |||
2.979836332894291173e-01 3.986253846989663563e-01 1.703858007849015976e+00 | |||
7.997698902784957653e-01 4.059258984225663225e-01 3.534310292757433025e+00 | |||
4.522283778821561961e-01 8.710208434442369141e-01 3.019092050051808584e+00 | |||
7.581376719297696631e-01 8.220998888792908632e-01 4.418695479960300432e+00 | |||
1.972428942832202825e-01 1.154221742223087155e-01 8.425430296432390165e-01 | |||
6.223627999722493032e-01 7.314930526686147516e-02 2.314344759304361787e+00 | |||
5.808300754248821862e-01 6.781466720147005800e-01 2.602975187488947739e+00 | |||
9.576552439612673329e-02 9.928949432726992264e-01 5.030806356841338589e-01 | |||
4.100191968015015620e-01 1.766477706761815369e-02 2.199085868051550374e+00 | |||
2.892753633894417087e-01 5.249937301651120825e-01 1.510928050123241828e+00 | |||
5.480326803984194584e-01 6.765224924869054357e-01 2.256073738015950347e+00 | |||
4.653862840236095977e-01 1.475417366536124764e-01 1.938120603143478560e+00 | |||
1.481108453258558333e-01 4.675039260192945800e-01 1.311026471697977147e+00 | |||
1.299439650181278827e-01 1.994133519847534064e-01 5.454775900606607664e-01 | |||
1.106819435806695173e-01 7.764336624926136965e-01 9.666599147513424972e-01 | |||
5.566539743940633889e-01 8.994534645058296363e-01 2.927366595661727633e+00 | |||
9.067406463349181323e-02 8.621879052072318750e-01 8.315219299702474309e-01 | |||
6.938256790006662733e-01 6.065923110957928133e-01 3.284626404618808948e+00 | |||
8.614276617758029886e-01 2.475854633144438655e-01 4.680055810944661943e+00 | |||
2.167196015288621602e-01 7.132989102141332749e-01 1.149718497607195244e+00 | |||
8.413586552623651960e-01 5.516499469071780570e-01 3.311758835415347768e+00 | |||
3.907179200432292498e-01 4.357109683062667660e-01 2.274073788004209895e+00 | |||
1.589735395931844986e-01 8.208887640617014148e-01 1.015565884492410476e+00 | |||
9.613965492088401010e-01 4.809474690694466315e-01 5.157947985083261244e+00 | |||
8.297755466368100796e-01 1.644098554161677050e-01 3.586618888242581349e+00 | |||
8.388098815227268412e-01 4.476810509068985811e-01 5.040051653054658409e+00 | |||
2.279856297891573735e-01 2.433050868928676724e-01 1.550905751489106921e+00 | |||
7.893810651517562116e-01 3.221654836020699797e-01 3.997764308421166035e+00 | |||
6.561675210916022571e-01 4.351741126163224394e-01 3.039802858379251571e+00 | |||
5.740090150255279244e-01 5.948497497584274152e-01 2.908245846269567725e+00 | |||
7.328360080277063204e-01 2.134127006746993072e-01 4.568117657375682761e+00 | |||
4.946637824629790403e-01 9.510041338062535932e-02 1.822473122426795378e+00 | |||
8.627953936010578362e-01 7.932255036082680455e-02 4.492409540273036406e+00 | |||
2.082514147474257005e-01 4.924490887581506326e-01 1.043094182591048469e+00 | |||
6.138460921037049989e-01 4.272596946961463305e-01 2.219129657444722348e+00 | |||
9.252039330093201341e-01 3.945180092044131559e-01 4.436142674559070187e+00 | |||
1.821449462205293468e-01 3.871847528554943629e-01 1.422428385194712819e+00 | |||
4.041894486358277305e-02 4.730473946927595819e-01 -3.012226300897136166e-02 | |||
3.636686652037771639e-01 5.873537929400659552e-01 1.851504192678116612e+00 | |||
6.978650572978981614e-01 3.882536807361431919e-01 3.060444352409822155e+00 | |||
1.078527797771136054e-01 6.957852579382399760e-01 -6.518479046929900322e-01 | |||
9.325503438800145650e-01 2.143804208718624871e-02 4.610707268249928603e+00 | |||
6.663415478668331948e-01 9.281821628240296551e-01 3.002954918326525835e+00 | |||
4.813244567031937926e-02 7.079686934907392093e-02 6.160699011960787397e-01 | |||
2.257010966384187522e-02 7.687909081325765559e-01 4.691271534502503560e-01 | |||
6.061116229161606084e-01 4.191344814402613306e-01 3.747818547247798282e+00 | |||
4.655677615800267732e-01 5.827148457971130791e-01 2.376212361555217978e+00 | |||
2.825891342108899362e-02 3.532841522503780141e-01 6.620932360305411546e-01 | |||
6.009826152607300465e-02 7.905021321943074275e-01 3.638498600086091939e-01 | |||
8.827068713844454795e-02 4.438307876100586746e-01 5.808743778595876206e-01 | |||
7.991261876646593709e-01 7.854151510467033415e-02 4.204719952423675799e+00 | |||
1.189838444905066961e-01 7.416370166532364472e-01 8.322146103945595730e-01 | |||
6.225837692759601527e-01 6.310620232117586381e-01 3.453154489543852623e+00 | |||
7.311956651166588994e-01 3.168642488438819793e-01 4.352727674411122294e+00 | |||
5.756564763243883220e-01 2.828406554721885069e-01 3.947408034545492939e+00 | |||
9.382055486044094117e-01 7.168147601660477264e-01 4.948352770080884255e+00 | |||
9.546660828884934169e-02 1.437744534916255423e-01 4.844118257903424207e-01 | |||
1.151229123196606530e-02 5.435892212943771451e-02 -7.713427384278788157e-01 | |||
4.289306534746354371e-01 7.585115118695228142e-01 1.672943511274438855e+00 | |||
2.720841418364485786e-01 1.683401564116593363e-01 9.880622913101808624e-01 | |||
9.553521920108963839e-01 5.848667324267601275e-01 4.808516969944884423e+00 | |||
9.560671618377164505e-01 5.181438446082220484e-01 4.933951664828871486e+00 | |||
5.196770028592577750e-01 7.306585795529608740e-01 3.451093432477776446e+00 | |||
5.349779123318809670e-02 2.328861462583488029e-01 7.704348747284186505e-01 | |||
3.859630940832011747e-01 9.789769759429359786e-01 2.074017873792239453e+00 | |||
7.490205155897455835e-01 3.267712126974233744e-01 3.651412368265581243e+00 | |||
1.668441368245933143e-01 9.133330140609380310e-01 1.243613443703563526e+00 | |||
2.205754665833413730e-01 7.832697007249476151e-01 8.595611522198556287e-01 | |||
9.404829422756235680e-01 9.929289525804722016e-01 5.870103925445858906e+00 | |||
1.885613455195433996e-01 9.627881098859110143e-01 3.800423621122401840e-01 | |||
2.979547345918899248e-01 4.730276887758750881e-01 1.935528123129407918e+00 | |||
8.519970016958130499e-01 5.943420665775437373e-01 4.310612038812805302e+00 | |||
7.865511680844923248e-01 9.416708270931484215e-02 3.582760101894609139e+00 | |||
3.547299087599004030e-01 8.445259368683057932e-01 2.127252690320021067e+00 | |||
8.516151544949630559e-01 8.147740388614883589e-01 4.810189857293523552e+00 | |||
9.837521205417909531e-01 9.499682533043930510e-01 4.754976751498949383e+00 | |||
3.737339448736265446e-02 5.689895437011959212e-01 -1.117927867466818603e-01 | |||
1.248452393691709128e-01 3.108682762971861635e-01 3.059567719721493773e-01 | |||
8.485331105413984609e-02 8.067137657807001982e-01 7.644506990207884378e-01 | |||
8.493746129016015445e-01 9.598202598405187747e-01 4.648012803649603342e+00 | |||
1.661884326471347872e-01 9.545306174615980632e-01 1.210655276442758055e+00 | |||
2.115569588053255456e-01 7.917981136715546153e-01 4.492708813379883148e-01 | |||
7.973993318061458613e-01 9.554658728242193533e-02 4.119536471100667008e+00 | |||
4.199405150546330345e-01 3.148719730595287425e-01 1.236848839470887640e+00 | |||
6.762135448160583318e-01 5.604047250520123313e-01 2.910528104527299931e+00 | |||
1.793503471079430645e-01 6.921908683206966506e-01 1.611537779389007552e+00 | |||
3.393629583105367553e-01 7.712311693730592443e-01 1.848155157842310992e+00 | |||
8.362277165385599842e-01 8.309138865141540986e-01 4.206771899349675436e+00 | |||
7.851254371836934309e-01 3.006834116028120896e-01 3.505872113422314307e+00 | |||
4.007973546322256553e-01 8.691050993207729558e-01 2.328489728285702220e+00 | |||
8.735930802668734341e-01 6.962474167713060824e-01 3.580318220057124634e+00 | |||
5.871151497198540126e-01 1.153138340802493111e-01 2.539780065278996801e+00 | |||
6.462095801076146451e-01 1.281924764607952349e-01 4.137574844925852702e+00 | |||
9.236416657093560190e-01 7.178359759200697576e-01 4.226289114208362285e+00 | |||
9.194376552125794744e-01 3.121869331310231921e-01 5.213485048810325928e+00 | |||
4.384694901432926173e-01 8.387390970367403220e-02 2.743511503146341290e+00 | |||
7.152802840844866283e-01 2.143176360763772159e-01 3.858880502287514336e+00 | |||
7.475678434845520748e-01 2.674533934032751192e-01 3.273610117647138384e+00 | |||
3.411257136223635467e-01 6.670886650940466289e-02 1.864105062920903277e+00 | |||
2.132768386275984973e-02 9.256663982227738563e-01 -1.163649561141600342e-01 | |||
5.897306418192969080e-01 7.003134124765836299e-01 3.359490726121215687e+00 | |||
4.802798226410556204e-01 6.483683637597190685e-01 1.993783201669373506e+00 | |||
8.176621480896004712e-01 1.043603729430799820e-01 4.091295839545188606e+00 | |||
7.855212575202087377e-01 7.157119340511944872e-01 4.507042197180223475e+00 | |||
5.684528941022698456e-01 2.856354236553743098e-01 2.437751537534779800e+00 | |||
9.930883471075108160e-01 7.712736516485831917e-01 4.929021629209715982e+00 | |||
6.530624944143862409e-01 8.343149271709339176e-01 2.913924188638919777e+00 | |||
5.819676274191801779e-01 2.540258043319393311e-01 2.757011911636317869e+00 | |||
4.486322285661017428e-01 3.548077819389158227e-01 2.008724623551346511e+00 | |||
5.337060915547677808e-01 2.134512399881538514e-01 2.045276372596628978e+00 | |||
5.266467762777323758e-01 9.431487670339336882e-01 2.872547719207049433e+00 | |||
8.101758718861310493e-01 9.025539622773742776e-01 4.449909344161872227e+00 | |||
7.699639056639613344e-01 9.683912061347941247e-01 4.472990741094066358e+00 | |||
9.233466972407796680e-01 5.286249270385992016e-01 3.977771758806475155e+00 | |||
8.623248714303998197e-01 1.334532827503559504e-01 4.578051879720754336e+00 | |||
2.045649072490571818e-01 8.511360791325676134e-01 1.622138715515219820e+00 | |||
8.808425032518121256e-01 3.278457833231640528e-02 4.767460771953611953e+00 | |||
1.747261335267188409e-01 5.208159514249149913e-01 9.107489734808142945e-01 | |||
9.038321562587570135e-02 5.097515707018624997e-01 8.679635110003780518e-01 | |||
2.667337674975465145e-01 3.395183568046972189e-01 3.954282994748328228e-01 | |||
8.942288885142990473e-01 1.218536601033595179e-01 4.516544786048811488e+00 | |||
2.495138014351878653e-01 3.362240006002403803e-02 7.274186371739601586e-01 | |||
6.632655291795690466e-01 1.051626646997456671e-01 3.366351748724312198e+00 | |||
4.636131384771519093e-01 7.391689207887839341e-01 9.190924064812198147e-01 | |||
9.849727748399310645e-01 4.105825659188074850e-01 5.081535606333690502e+00 | |||
8.200513987740756239e-01 7.870989941024173486e-01 5.105767638351050408e+00 | |||
8.345258582652974599e-01 6.504287753442997699e-01 4.469356354417690191e+00 | |||
5.530559412316298218e-01 3.014739582304205578e-01 2.221987665263402967e+00 | |||
5.728205136550856835e-01 6.961249314381353637e-01 3.332064394953069275e+00 | |||
7.728992910594711940e-02 7.465071614300229363e-01 5.072220713377015855e-01 | |||
3.553351520590779522e-01 5.618566588746031210e-01 1.805773594922777336e+00 | |||
7.780093699322522260e-01 6.370279516433825506e-01 3.796003212735603594e+00 | |||
4.200421460868658530e-01 8.648496031052255173e-02 2.073022179648955188e+00 | |||
7.015632316649023092e-01 6.484387924575446549e-01 3.559606914505504260e+00 | |||
2.405632780665224457e-01 3.195669345676046547e-01 7.324100489677702885e-01 | |||
3.311074230748540792e-01 8.685757534288673920e-02 1.123436154899798867e+00 | |||
3.998601941346905386e-01 5.552870830766860610e-01 1.915775874339034512e+00 | |||
3.912244127302044738e-01 9.760058205599845271e-01 1.832042737206649452e+00 | |||
7.545640111214302337e-01 6.239422861366119566e-01 3.933844589523757929e+00 | |||
1.731020325733930720e-01 8.307779734406610661e-01 1.584837322370275192e+00 | |||
9.140779443033931750e-01 1.761632572799414209e-01 4.635617983734372949e+00 | |||
6.406597700159744058e-01 4.449920268218388042e-02 3.619782604199833553e+00 | |||
1.801895938347751747e-01 2.885096528520691983e-02 9.253929900355385429e-01 | |||
3.038534282472222792e-01 8.109008691606200969e-02 1.931362524069328845e+00 | |||
2.364111047450501157e-01 9.621771163112075032e-01 1.175931679959700382e+00 | |||
1.888579567058331632e-01 2.989827457994593818e-01 1.331691847396279549e+00 | |||
4.017801235588650544e-01 5.746849052510728528e-01 2.284038251179917545e+00 | |||
7.691213425655432090e-01 3.395055574798373499e-01 3.100343064061910958e+00 | |||
6.550934968984141182e-01 3.720036956270784678e-01 3.491089545507741754e+00 | |||
4.023686635365519848e-02 9.581326531584744011e-01 3.637448978915412967e-01 | |||
6.285091574769069434e-01 9.827706180286350790e-01 3.274648861414132028e+00 | |||
7.868202014082179563e-01 5.408929245820974785e-01 4.063026715844942238e+00 | |||
3.910870713585677949e-01 4.115275676678212813e-01 1.977382791230766657e+00 | |||
4.810888481033770425e-01 2.335910309083686354e-01 2.112886227322197108e+00 | |||
9.483244100044019298e-02 3.605432105348165273e-01 2.671379064678270909e-01 | |||
3.615132104900360410e-01 8.959298980423738845e-01 1.439109578489957020e+00 | |||
3.496088798675596188e-01 1.159868538215188538e-01 1.100416491082838011e+00 | |||
1.752584989807870475e-01 8.500433679768688577e-01 9.456504046667791874e-01 | |||
5.696541721886292375e-01 2.719451527203955443e-01 3.207022932205441546e+00 | |||
1.257906969503060912e-01 5.102860482935389630e-01 4.391573016080443370e-01 | |||
9.659180041815862428e-02 6.949380529845092802e-01 7.072119294438530268e-01 | |||
2.054150544247815846e-01 5.882009510622373538e-01 5.779033712916188392e-01 | |||
2.210044142168009484e-01 4.618615594701123150e-01 9.140878975298083464e-01 | |||
4.154650354296188786e-01 4.045679816906707638e-01 2.198907643480010332e+00 | |||
7.371618606847257782e-01 3.338129400401774749e-01 3.245693217291083954e+00 | |||
3.937950341162077539e-01 2.663433002832898966e-01 1.386067558419330936e+00 | |||
3.114390166515048630e-01 6.835068649679199027e-01 1.388705802372090403e+00 | |||
2.969984169267261276e-01 2.167207871474979841e-01 1.225921547211908269e+00 | |||
8.665887431269525543e-01 2.719244077785530900e-01 4.962370196944209333e+00 | |||
5.920168417723699061e-01 5.686248008396056486e-01 3.691946248409955444e+00 | |||
7.039049387710701877e-01 9.766367763810511748e-01 4.356350940590338006e+00 | |||
3.805298246038364418e-01 8.410053556422516507e-01 1.971952595700146027e+00 | |||
2.261698816689344804e-01 5.125989413008203988e-01 4.317032515586423091e-01 | |||
8.080180552046107856e-01 1.312926632015042339e-01 3.368419745332182469e+00 | |||
7.284160939898801645e-01 5.059757554471627783e-01 4.317314105858608642e+00 | |||
4.129099719860259698e-01 1.671213862046826426e-01 2.035668918620091805e+00 | |||
6.233205366576620721e-01 6.614357814101893274e-01 3.208310246239634456e+00 | |||
8.109529878867841601e-01 3.114791270951091651e-01 4.178127503612525828e+00 | |||
5.568666377962800951e-01 7.908161752846655235e-01 3.274160032551334254e+00 | |||
6.584994598486142436e-01 7.441165751555391950e-01 3.849476891377942245e+00 | |||
8.838710503302456001e-01 1.582292539619044591e-01 4.073499503581468062e+00 | |||
7.647528413413395842e-02 9.997612698905589124e-01 2.572544553349556118e-01 | |||
8.560337057837326125e-01 1.906169904574519514e-01 3.681701253116876682e+00 | |||
4.132389782657139854e-01 6.875371741120974711e-01 2.352924830879973062e+00 | |||
5.916647208438258199e-01 6.878460327662490048e-01 3.104853608287138744e+00 | |||
2.782180868122997586e-01 6.776851436298622078e-01 1.761580580541214802e+00 | |||
9.266054863421087084e-01 2.627492879721394781e-01 4.628763757826364511e+00 | |||
5.523392520960408447e-01 6.644924434656053203e-01 3.125174246926364141e+00 | |||
5.287394139091310397e-01 6.394872222310177268e-02 3.117993250820441098e+00 | |||
3.686558096442410504e-01 6.268691773262476952e-01 1.936875223460121642e+00 | |||
9.508303497338959076e-01 2.603495644055497937e-01 5.539503503356506542e+00 | |||
4.974170422761559074e-02 5.234160700859691318e-01 9.854439982145415389e-01 | |||
9.343567329844875147e-01 1.007677945281176823e-01 5.049580527766805105e+00 | |||
3.951543969490975972e-01 5.731481704547733980e-01 2.435359065439732795e+00 | |||
2.593456023940206023e-01 9.908603604474142124e-02 1.265386858376813972e+00 | |||
1.945948991403323447e-03 7.928332931112429538e-01 -7.151524026985970339e-01 | |||
8.014805253134763552e-01 6.015755345876689919e-01 4.634065213613259182e+00 |
@@ -1,699 +0,0 @@ | |||
1000025,5,1,1,1,2,1,3,1,1,2 | |||
1002945,5,4,4,5,7,10,3,2,1,2 | |||
1015425,3,1,1,1,2,2,3,1,1,2 | |||
1016277,6,8,8,1,3,4,3,7,1,2 | |||
1017023,4,1,1,3,2,1,3,1,1,2 | |||
1017122,8,10,10,8,7,10,9,7,1,4 | |||
1018099,1,1,1,1,2,10,3,1,1,2 | |||
1018561,2,1,2,1,2,1,3,1,1,2 | |||
1033078,2,1,1,1,2,1,1,1,5,2 | |||
1033078,4,2,1,1,2,1,2,1,1,2 | |||
1035283,1,1,1,1,1,1,3,1,1,2 | |||
1036172,2,1,1,1,2,1,2,1,1,2 | |||
1041801,5,3,3,3,2,3,4,4,1,4 | |||
1043999,1,1,1,1,2,3,3,1,1,2 | |||
1044572,8,7,5,10,7,9,5,5,4,4 | |||
1047630,7,4,6,4,6,1,4,3,1,4 | |||
1048672,4,1,1,1,2,1,2,1,1,2 | |||
1049815,4,1,1,1,2,1,3,1,1,2 | |||
1050670,10,7,7,6,4,10,4,1,2,4 | |||
1050718,6,1,1,1,2,1,3,1,1,2 | |||
1054590,7,3,2,10,5,10,5,4,4,4 | |||
1054593,10,5,5,3,6,7,7,10,1,4 | |||
1056784,3,1,1,1,2,1,2,1,1,2 | |||
1057013,8,4,5,1,2,?,7,3,1,4 | |||
1059552,1,1,1,1,2,1,3,1,1,2 | |||
1065726,5,2,3,4,2,7,3,6,1,4 | |||
1066373,3,2,1,1,1,1,2,1,1,2 | |||
1066979,5,1,1,1,2,1,2,1,1,2 | |||
1067444,2,1,1,1,2,1,2,1,1,2 | |||
1070935,1,1,3,1,2,1,1,1,1,2 | |||
1070935,3,1,1,1,1,1,2,1,1,2 | |||
1071760,2,1,1,1,2,1,3,1,1,2 | |||
1072179,10,7,7,3,8,5,7,4,3,4 | |||
1074610,2,1,1,2,2,1,3,1,1,2 | |||
1075123,3,1,2,1,2,1,2,1,1,2 | |||
1079304,2,1,1,1,2,1,2,1,1,2 | |||
1080185,10,10,10,8,6,1,8,9,1,4 | |||
1081791,6,2,1,1,1,1,7,1,1,2 | |||
1084584,5,4,4,9,2,10,5,6,1,4 | |||
1091262,2,5,3,3,6,7,7,5,1,4 | |||
1096800,6,6,6,9,6,?,7,8,1,2 | |||
1099510,10,4,3,1,3,3,6,5,2,4 | |||
1100524,6,10,10,2,8,10,7,3,3,4 | |||
1102573,5,6,5,6,10,1,3,1,1,4 | |||
1103608,10,10,10,4,8,1,8,10,1,4 | |||
1103722,1,1,1,1,2,1,2,1,2,2 | |||
1105257,3,7,7,4,4,9,4,8,1,4 | |||
1105524,1,1,1,1,2,1,2,1,1,2 | |||
1106095,4,1,1,3,2,1,3,1,1,2 | |||
1106829,7,8,7,2,4,8,3,8,2,4 | |||
1108370,9,5,8,1,2,3,2,1,5,4 | |||
1108449,5,3,3,4,2,4,3,4,1,4 | |||
1110102,10,3,6,2,3,5,4,10,2,4 | |||
1110503,5,5,5,8,10,8,7,3,7,4 | |||
1110524,10,5,5,6,8,8,7,1,1,4 | |||
1111249,10,6,6,3,4,5,3,6,1,4 | |||
1112209,8,10,10,1,3,6,3,9,1,4 | |||
1113038,8,2,4,1,5,1,5,4,4,4 | |||
1113483,5,2,3,1,6,10,5,1,1,4 | |||
1113906,9,5,5,2,2,2,5,1,1,4 | |||
1115282,5,3,5,5,3,3,4,10,1,4 | |||
1115293,1,1,1,1,2,2,2,1,1,2 | |||
1116116,9,10,10,1,10,8,3,3,1,4 | |||
1116132,6,3,4,1,5,2,3,9,1,4 | |||
1116192,1,1,1,1,2,1,2,1,1,2 | |||
1116998,10,4,2,1,3,2,4,3,10,4 | |||
1117152,4,1,1,1,2,1,3,1,1,2 | |||
1118039,5,3,4,1,8,10,4,9,1,4 | |||
1120559,8,3,8,3,4,9,8,9,8,4 | |||
1121732,1,1,1,1,2,1,3,2,1,2 | |||
1121919,5,1,3,1,2,1,2,1,1,2 | |||
1123061,6,10,2,8,10,2,7,8,10,4 | |||
1124651,1,3,3,2,2,1,7,2,1,2 | |||
1125035,9,4,5,10,6,10,4,8,1,4 | |||
1126417,10,6,4,1,3,4,3,2,3,4 | |||
1131294,1,1,2,1,2,2,4,2,1,2 | |||
1132347,1,1,4,1,2,1,2,1,1,2 | |||
1133041,5,3,1,2,2,1,2,1,1,2 | |||
1133136,3,1,1,1,2,3,3,1,1,2 | |||
1136142,2,1,1,1,3,1,2,1,1,2 | |||
1137156,2,2,2,1,1,1,7,1,1,2 | |||
1143978,4,1,1,2,2,1,2,1,1,2 | |||
1143978,5,2,1,1,2,1,3,1,1,2 | |||
1147044,3,1,1,1,2,2,7,1,1,2 | |||
1147699,3,5,7,8,8,9,7,10,7,4 | |||
1147748,5,10,6,1,10,4,4,10,10,4 | |||
1148278,3,3,6,4,5,8,4,4,1,4 | |||
1148873,3,6,6,6,5,10,6,8,3,4 | |||
1152331,4,1,1,1,2,1,3,1,1,2 | |||
1155546,2,1,1,2,3,1,2,1,1,2 | |||
1156272,1,1,1,1,2,1,3,1,1,2 | |||
1156948,3,1,1,2,2,1,1,1,1,2 | |||
1157734,4,1,1,1,2,1,3,1,1,2 | |||
1158247,1,1,1,1,2,1,2,1,1,2 | |||
1160476,2,1,1,1,2,1,3,1,1,2 | |||
1164066,1,1,1,1,2,1,3,1,1,2 | |||
1165297,2,1,1,2,2,1,1,1,1,2 | |||
1165790,5,1,1,1,2,1,3,1,1,2 | |||
1165926,9,6,9,2,10,6,2,9,10,4 | |||
1166630,7,5,6,10,5,10,7,9,4,4 | |||
1166654,10,3,5,1,10,5,3,10,2,4 | |||
1167439,2,3,4,4,2,5,2,5,1,4 | |||
1167471,4,1,2,1,2,1,3,1,1,2 | |||
1168359,8,2,3,1,6,3,7,1,1,4 | |||
1168736,10,10,10,10,10,1,8,8,8,4 | |||
1169049,7,3,4,4,3,3,3,2,7,4 | |||
1170419,10,10,10,8,2,10,4,1,1,4 | |||
1170420,1,6,8,10,8,10,5,7,1,4 | |||
1171710,1,1,1,1,2,1,2,3,1,2 | |||
1171710,6,5,4,4,3,9,7,8,3,4 | |||
1171795,1,3,1,2,2,2,5,3,2,2 | |||
1171845,8,6,4,3,5,9,3,1,1,4 | |||
1172152,10,3,3,10,2,10,7,3,3,4 | |||
1173216,10,10,10,3,10,8,8,1,1,4 | |||
1173235,3,3,2,1,2,3,3,1,1,2 | |||
1173347,1,1,1,1,2,5,1,1,1,2 | |||
1173347,8,3,3,1,2,2,3,2,1,2 | |||
1173509,4,5,5,10,4,10,7,5,8,4 | |||
1173514,1,1,1,1,4,3,1,1,1,2 | |||
1173681,3,2,1,1,2,2,3,1,1,2 | |||
1174057,1,1,2,2,2,1,3,1,1,2 | |||
1174057,4,2,1,1,2,2,3,1,1,2 | |||
1174131,10,10,10,2,10,10,5,3,3,4 | |||
1174428,5,3,5,1,8,10,5,3,1,4 | |||
1175937,5,4,6,7,9,7,8,10,1,4 | |||
1176406,1,1,1,1,2,1,2,1,1,2 | |||
1176881,7,5,3,7,4,10,7,5,5,4 | |||
1177027,3,1,1,1,2,1,3,1,1,2 | |||
1177399,8,3,5,4,5,10,1,6,2,4 | |||
1177512,1,1,1,1,10,1,1,1,1,2 | |||
1178580,5,1,3,1,2,1,2,1,1,2 | |||
1179818,2,1,1,1,2,1,3,1,1,2 | |||
1180194,5,10,8,10,8,10,3,6,3,4 | |||
1180523,3,1,1,1,2,1,2,2,1,2 | |||
1180831,3,1,1,1,3,1,2,1,1,2 | |||
1181356,5,1,1,1,2,2,3,3,1,2 | |||
1182404,4,1,1,1,2,1,2,1,1,2 | |||
1182410,3,1,1,1,2,1,1,1,1,2 | |||
1183240,4,1,2,1,2,1,2,1,1,2 | |||
1183246,1,1,1,1,1,?,2,1,1,2 | |||
1183516,3,1,1,1,2,1,1,1,1,2 | |||
1183911,2,1,1,1,2,1,1,1,1,2 | |||
1183983,9,5,5,4,4,5,4,3,3,4 | |||
1184184,1,1,1,1,2,5,1,1,1,2 | |||
1184241,2,1,1,1,2,1,2,1,1,2 | |||
1184840,1,1,3,1,2,?,2,1,1,2 | |||
1185609,3,4,5,2,6,8,4,1,1,4 | |||
1185610,1,1,1,1,3,2,2,1,1,2 | |||
1187457,3,1,1,3,8,1,5,8,1,2 | |||
1187805,8,8,7,4,10,10,7,8,7,4 | |||
1188472,1,1,1,1,1,1,3,1,1,2 | |||
1189266,7,2,4,1,6,10,5,4,3,4 | |||
1189286,10,10,8,6,4,5,8,10,1,4 | |||
1190394,4,1,1,1,2,3,1,1,1,2 | |||
1190485,1,1,1,1,2,1,1,1,1,2 | |||
1192325,5,5,5,6,3,10,3,1,1,4 | |||
1193091,1,2,2,1,2,1,2,1,1,2 | |||
1193210,2,1,1,1,2,1,3,1,1,2 | |||
1193683,1,1,2,1,3,?,1,1,1,2 | |||
1196295,9,9,10,3,6,10,7,10,6,4 | |||
1196915,10,7,7,4,5,10,5,7,2,4 | |||
1197080,4,1,1,1,2,1,3,2,1,2 | |||
1197270,3,1,1,1,2,1,3,1,1,2 | |||
1197440,1,1,1,2,1,3,1,1,7,2 | |||
1197510,5,1,1,1,2,?,3,1,1,2 | |||
1197979,4,1,1,1,2,2,3,2,1,2 | |||
1197993,5,6,7,8,8,10,3,10,3,4 | |||
1198128,10,8,10,10,6,1,3,1,10,4 | |||
1198641,3,1,1,1,2,1,3,1,1,2 | |||
1199219,1,1,1,2,1,1,1,1,1,2 | |||
1199731,3,1,1,1,2,1,1,1,1,2 | |||
1199983,1,1,1,1,2,1,3,1,1,2 | |||
1200772,1,1,1,1,2,1,2,1,1,2 | |||
1200847,6,10,10,10,8,10,10,10,7,4 | |||
1200892,8,6,5,4,3,10,6,1,1,4 | |||
1200952,5,8,7,7,10,10,5,7,1,4 | |||
1201834,2,1,1,1,2,1,3,1,1,2 | |||
1201936,5,10,10,3,8,1,5,10,3,4 | |||
1202125,4,1,1,1,2,1,3,1,1,2 | |||
1202812,5,3,3,3,6,10,3,1,1,4 | |||
1203096,1,1,1,1,1,1,3,1,1,2 | |||
1204242,1,1,1,1,2,1,1,1,1,2 | |||
1204898,6,1,1,1,2,1,3,1,1,2 | |||
1205138,5,8,8,8,5,10,7,8,1,4 | |||
1205579,8,7,6,4,4,10,5,1,1,4 | |||
1206089,2,1,1,1,1,1,3,1,1,2 | |||
1206695,1,5,8,6,5,8,7,10,1,4 | |||
1206841,10,5,6,10,6,10,7,7,10,4 | |||
1207986,5,8,4,10,5,8,9,10,1,4 | |||
1208301,1,2,3,1,2,1,3,1,1,2 | |||
1210963,10,10,10,8,6,8,7,10,1,4 | |||
1211202,7,5,10,10,10,10,4,10,3,4 | |||
1212232,5,1,1,1,2,1,2,1,1,2 | |||
1212251,1,1,1,1,2,1,3,1,1,2 | |||
1212422,3,1,1,1,2,1,3,1,1,2 | |||
1212422,4,1,1,1,2,1,3,1,1,2 | |||
1213375,8,4,4,5,4,7,7,8,2,2 | |||
1213383,5,1,1,4,2,1,3,1,1,2 | |||
1214092,1,1,1,1,2,1,1,1,1,2 | |||
1214556,3,1,1,1,2,1,2,1,1,2 | |||
1214966,9,7,7,5,5,10,7,8,3,4 | |||
1216694,10,8,8,4,10,10,8,1,1,4 | |||
1216947,1,1,1,1,2,1,3,1,1,2 | |||
1217051,5,1,1,1,2,1,3,1,1,2 | |||
1217264,1,1,1,1,2,1,3,1,1,2 | |||
1218105,5,10,10,9,6,10,7,10,5,4 | |||
1218741,10,10,9,3,7,5,3,5,1,4 | |||
1218860,1,1,1,1,1,1,3,1,1,2 | |||
1218860,1,1,1,1,1,1,3,1,1,2 | |||
1219406,5,1,1,1,1,1,3,1,1,2 | |||
1219525,8,10,10,10,5,10,8,10,6,4 | |||
1219859,8,10,8,8,4,8,7,7,1,4 | |||
1220330,1,1,1,1,2,1,3,1,1,2 | |||
1221863,10,10,10,10,7,10,7,10,4,4 | |||
1222047,10,10,10,10,3,10,10,6,1,4 | |||
1222936,8,7,8,7,5,5,5,10,2,4 | |||
1223282,1,1,1,1,2,1,2,1,1,2 | |||
1223426,1,1,1,1,2,1,3,1,1,2 | |||
1223793,6,10,7,7,6,4,8,10,2,4 | |||
1223967,6,1,3,1,2,1,3,1,1,2 | |||
1224329,1,1,1,2,2,1,3,1,1,2 | |||
1225799,10,6,4,3,10,10,9,10,1,4 | |||
1226012,4,1,1,3,1,5,2,1,1,4 | |||
1226612,7,5,6,3,3,8,7,4,1,4 | |||
1227210,10,5,5,6,3,10,7,9,2,4 | |||
1227244,1,1,1,1,2,1,2,1,1,2 | |||
1227481,10,5,7,4,4,10,8,9,1,4 | |||
1228152,8,9,9,5,3,5,7,7,1,4 | |||
1228311,1,1,1,1,1,1,3,1,1,2 | |||
1230175,10,10,10,3,10,10,9,10,1,4 | |||
1230688,7,4,7,4,3,7,7,6,1,4 | |||
1231387,6,8,7,5,6,8,8,9,2,4 | |||
1231706,8,4,6,3,3,1,4,3,1,2 | |||
1232225,10,4,5,5,5,10,4,1,1,4 | |||
1236043,3,3,2,1,3,1,3,6,1,2 | |||
1241232,3,1,4,1,2,?,3,1,1,2 | |||
1241559,10,8,8,2,8,10,4,8,10,4 | |||
1241679,9,8,8,5,6,2,4,10,4,4 | |||
1242364,8,10,10,8,6,9,3,10,10,4 | |||
1243256,10,4,3,2,3,10,5,3,2,4 | |||
1270479,5,1,3,3,2,2,2,3,1,2 | |||
1276091,3,1,1,3,1,1,3,1,1,2 | |||
1277018,2,1,1,1,2,1,3,1,1,2 | |||
128059,1,1,1,1,2,5,5,1,1,2 | |||
1285531,1,1,1,1,2,1,3,1,1,2 | |||
1287775,5,1,1,2,2,2,3,1,1,2 | |||
144888,8,10,10,8,5,10,7,8,1,4 | |||
145447,8,4,4,1,2,9,3,3,1,4 | |||
167528,4,1,1,1,2,1,3,6,1,2 | |||
169356,3,1,1,1,2,?,3,1,1,2 | |||
183913,1,2,2,1,2,1,1,1,1,2 | |||
191250,10,4,4,10,2,10,5,3,3,4 | |||
1017023,6,3,3,5,3,10,3,5,3,2 | |||
1100524,6,10,10,2,8,10,7,3,3,4 | |||
1116116,9,10,10,1,10,8,3,3,1,4 | |||
1168736,5,6,6,2,4,10,3,6,1,4 | |||
1182404,3,1,1,1,2,1,1,1,1,2 | |||
1182404,3,1,1,1,2,1,2,1,1,2 | |||
1198641,3,1,1,1,2,1,3,1,1,2 | |||
242970,5,7,7,1,5,8,3,4,1,2 | |||
255644,10,5,8,10,3,10,5,1,3,4 | |||
263538,5,10,10,6,10,10,10,6,5,4 | |||
274137,8,8,9,4,5,10,7,8,1,4 | |||
303213,10,4,4,10,6,10,5,5,1,4 | |||
314428,7,9,4,10,10,3,5,3,3,4 | |||
1182404,5,1,4,1,2,1,3,2,1,2 | |||
1198641,10,10,6,3,3,10,4,3,2,4 | |||
320675,3,3,5,2,3,10,7,1,1,4 | |||
324427,10,8,8,2,3,4,8,7,8,4 | |||
385103,1,1,1,1,2,1,3,1,1,2 | |||
390840,8,4,7,1,3,10,3,9,2,4 | |||
411453,5,1,1,1,2,1,3,1,1,2 | |||
320675,3,3,5,2,3,10,7,1,1,4 | |||
428903,7,2,4,1,3,4,3,3,1,4 | |||
431495,3,1,1,1,2,1,3,2,1,2 | |||
432809,3,1,3,1,2,?,2,1,1,2 | |||
434518,3,1,1,1,2,1,2,1,1,2 | |||
452264,1,1,1,1,2,1,2,1,1,2 | |||
456282,1,1,1,1,2,1,3,1,1,2 | |||
476903,10,5,7,3,3,7,3,3,8,4 | |||
486283,3,1,1,1,2,1,3,1,1,2 | |||
486662,2,1,1,2,2,1,3,1,1,2 | |||
488173,1,4,3,10,4,10,5,6,1,4 | |||
492268,10,4,6,1,2,10,5,3,1,4 | |||
508234,7,4,5,10,2,10,3,8,2,4 | |||
527363,8,10,10,10,8,10,10,7,3,4 | |||
529329,10,10,10,10,10,10,4,10,10,4 | |||
535331,3,1,1,1,3,1,2,1,1,2 | |||
543558,6,1,3,1,4,5,5,10,1,4 | |||
555977,5,6,6,8,6,10,4,10,4,4 | |||
560680,1,1,1,1,2,1,1,1,1,2 | |||
561477,1,1,1,1,2,1,3,1,1,2 | |||
563649,8,8,8,1,2,?,6,10,1,4 | |||
601265,10,4,4,6,2,10,2,3,1,4 | |||
606140,1,1,1,1,2,?,2,1,1,2 | |||
606722,5,5,7,8,6,10,7,4,1,4 | |||
616240,5,3,4,3,4,5,4,7,1,2 | |||
61634,5,4,3,1,2,?,2,3,1,2 | |||
625201,8,2,1,1,5,1,1,1,1,2 | |||
63375,9,1,2,6,4,10,7,7,2,4 | |||
635844,8,4,10,5,4,4,7,10,1,4 | |||
636130,1,1,1,1,2,1,3,1,1,2 | |||
640744,10,10,10,7,9,10,7,10,10,4 | |||
646904,1,1,1,1,2,1,3,1,1,2 | |||
653777,8,3,4,9,3,10,3,3,1,4 | |||
659642,10,8,4,4,4,10,3,10,4,4 | |||
666090,1,1,1,1,2,1,3,1,1,2 | |||
666942,1,1,1,1,2,1,3,1,1,2 | |||
667204,7,8,7,6,4,3,8,8,4,4 | |||
673637,3,1,1,1,2,5,5,1,1,2 | |||
684955,2,1,1,1,3,1,2,1,1,2 | |||
688033,1,1,1,1,2,1,1,1,1,2 | |||
691628,8,6,4,10,10,1,3,5,1,4 | |||
693702,1,1,1,1,2,1,1,1,1,2 | |||
704097,1,1,1,1,1,1,2,1,1,2 | |||
704168,4,6,5,6,7,?,4,9,1,2 | |||
706426,5,5,5,2,5,10,4,3,1,4 | |||
709287,6,8,7,8,6,8,8,9,1,4 | |||
718641,1,1,1,1,5,1,3,1,1,2 | |||
721482,4,4,4,4,6,5,7,3,1,2 | |||
730881,7,6,3,2,5,10,7,4,6,4 | |||
733639,3,1,1,1,2,?,3,1,1,2 | |||
733639,3,1,1,1,2,1,3,1,1,2 | |||
733823,5,4,6,10,2,10,4,1,1,4 | |||
740492,1,1,1,1,2,1,3,1,1,2 | |||
743348,3,2,2,1,2,1,2,3,1,2 | |||
752904,10,1,1,1,2,10,5,4,1,4 | |||
756136,1,1,1,1,2,1,2,1,1,2 | |||
760001,8,10,3,2,6,4,3,10,1,4 | |||
760239,10,4,6,4,5,10,7,1,1,4 | |||
76389,10,4,7,2,2,8,6,1,1,4 | |||
764974,5,1,1,1,2,1,3,1,2,2 | |||
770066,5,2,2,2,2,1,2,2,1,2 | |||
785208,5,4,6,6,4,10,4,3,1,4 | |||
785615,8,6,7,3,3,10,3,4,2,4 | |||
792744,1,1,1,1,2,1,1,1,1,2 | |||
797327,6,5,5,8,4,10,3,4,1,4 | |||
798429,1,1,1,1,2,1,3,1,1,2 | |||
704097,1,1,1,1,1,1,2,1,1,2 | |||
806423,8,5,5,5,2,10,4,3,1,4 | |||
809912,10,3,3,1,2,10,7,6,1,4 | |||
810104,1,1,1,1,2,1,3,1,1,2 | |||
814265,2,1,1,1,2,1,1,1,1,2 | |||
814911,1,1,1,1,2,1,1,1,1,2 | |||
822829,7,6,4,8,10,10,9,5,3,4 | |||
826923,1,1,1,1,2,1,1,1,1,2 | |||
830690,5,2,2,2,3,1,1,3,1,2 | |||
831268,1,1,1,1,1,1,1,3,1,2 | |||
832226,3,4,4,10,5,1,3,3,1,4 | |||
832567,4,2,3,5,3,8,7,6,1,4 | |||
836433,5,1,1,3,2,1,1,1,1,2 | |||
837082,2,1,1,1,2,1,3,1,1,2 | |||
846832,3,4,5,3,7,3,4,6,1,2 | |||
850831,2,7,10,10,7,10,4,9,4,4 | |||
855524,1,1,1,1,2,1,2,1,1,2 | |||
857774,4,1,1,1,3,1,2,2,1,2 | |||
859164,5,3,3,1,3,3,3,3,3,4 | |||
859350,8,10,10,7,10,10,7,3,8,4 | |||
866325,8,10,5,3,8,4,4,10,3,4 | |||
873549,10,3,5,4,3,7,3,5,3,4 | |||
877291,6,10,10,10,10,10,8,10,10,4 | |||
877943,3,10,3,10,6,10,5,1,4,4 | |||
888169,3,2,2,1,4,3,2,1,1,2 | |||
888523,4,4,4,2,2,3,2,1,1,2 | |||
896404,2,1,1,1,2,1,3,1,1,2 | |||
897172,2,1,1,1,2,1,2,1,1,2 | |||
95719,6,10,10,10,8,10,7,10,7,4 | |||
160296,5,8,8,10,5,10,8,10,3,4 | |||
342245,1,1,3,1,2,1,1,1,1,2 | |||
428598,1,1,3,1,1,1,2,1,1,2 | |||
492561,4,3,2,1,3,1,2,1,1,2 | |||
493452,1,1,3,1,2,1,1,1,1,2 | |||
493452,4,1,2,1,2,1,2,1,1,2 | |||
521441,5,1,1,2,2,1,2,1,1,2 | |||
560680,3,1,2,1,2,1,2,1,1,2 | |||
636437,1,1,1,1,2,1,1,1,1,2 | |||
640712,1,1,1,1,2,1,2,1,1,2 | |||
654244,1,1,1,1,1,1,2,1,1,2 | |||
657753,3,1,1,4,3,1,2,2,1,2 | |||
685977,5,3,4,1,4,1,3,1,1,2 | |||
805448,1,1,1,1,2,1,1,1,1,2 | |||
846423,10,6,3,6,4,10,7,8,4,4 | |||
1002504,3,2,2,2,2,1,3,2,1,2 | |||
1022257,2,1,1,1,2,1,1,1,1,2 | |||
1026122,2,1,1,1,2,1,1,1,1,2 | |||
1071084,3,3,2,2,3,1,1,2,3,2 | |||
1080233,7,6,6,3,2,10,7,1,1,4 | |||
1114570,5,3,3,2,3,1,3,1,1,2 | |||
1114570,2,1,1,1,2,1,2,2,1,2 | |||
1116715,5,1,1,1,3,2,2,2,1,2 | |||
1131411,1,1,1,2,2,1,2,1,1,2 | |||
1151734,10,8,7,4,3,10,7,9,1,4 | |||
1156017,3,1,1,1,2,1,2,1,1,2 | |||
1158247,1,1,1,1,1,1,1,1,1,2 | |||
1158405,1,2,3,1,2,1,2,1,1,2 | |||
1168278,3,1,1,1,2,1,2,1,1,2 | |||
1176187,3,1,1,1,2,1,3,1,1,2 | |||
1196263,4,1,1,1,2,1,1,1,1,2 | |||
1196475,3,2,1,1,2,1,2,2,1,2 | |||
1206314,1,2,3,1,2,1,1,1,1,2 | |||
1211265,3,10,8,7,6,9,9,3,8,4 | |||
1213784,3,1,1,1,2,1,1,1,1,2 | |||
1223003,5,3,3,1,2,1,2,1,1,2 | |||
1223306,3,1,1,1,2,4,1,1,1,2 | |||
1223543,1,2,1,3,2,1,1,2,1,2 | |||
1229929,1,1,1,1,2,1,2,1,1,2 | |||
1231853,4,2,2,1,2,1,2,1,1,2 | |||
1234554,1,1,1,1,2,1,2,1,1,2 | |||
1236837,2,3,2,2,2,2,3,1,1,2 | |||
1237674,3,1,2,1,2,1,2,1,1,2 | |||
1238021,1,1,1,1,2,1,2,1,1,2 | |||
1238464,1,1,1,1,1,?,2,1,1,2 | |||
1238633,10,10,10,6,8,4,8,5,1,4 | |||
1238915,5,1,2,1,2,1,3,1,1,2 | |||
1238948,8,5,6,2,3,10,6,6,1,4 | |||
1239232,3,3,2,6,3,3,3,5,1,2 | |||
1239347,8,7,8,5,10,10,7,2,1,4 | |||
1239967,1,1,1,1,2,1,2,1,1,2 | |||
1240337,5,2,2,2,2,2,3,2,2,2 | |||
1253505,2,3,1,1,5,1,1,1,1,2 | |||
1255384,3,2,2,3,2,3,3,1,1,2 | |||
1257200,10,10,10,7,10,10,8,2,1,4 | |||
1257648,4,3,3,1,2,1,3,3,1,2 | |||
1257815,5,1,3,1,2,1,2,1,1,2 | |||
1257938,3,1,1,1,2,1,1,1,1,2 | |||
1258549,9,10,10,10,10,10,10,10,1,4 | |||
1258556,5,3,6,1,2,1,1,1,1,2 | |||
1266154,8,7,8,2,4,2,5,10,1,4 | |||
1272039,1,1,1,1,2,1,2,1,1,2 | |||
1276091,2,1,1,1,2,1,2,1,1,2 | |||
1276091,1,3,1,1,2,1,2,2,1,2 | |||
1276091,5,1,1,3,4,1,3,2,1,2 | |||
1277629,5,1,1,1,2,1,2,2,1,2 | |||
1293439,3,2,2,3,2,1,1,1,1,2 | |||
1293439,6,9,7,5,5,8,4,2,1,2 | |||
1294562,10,8,10,1,3,10,5,1,1,4 | |||
1295186,10,10,10,1,6,1,2,8,1,4 | |||
527337,4,1,1,1,2,1,1,1,1,2 | |||
558538,4,1,3,3,2,1,1,1,1,2 | |||
566509,5,1,1,1,2,1,1,1,1,2 | |||
608157,10,4,3,10,4,10,10,1,1,4 | |||
677910,5,2,2,4,2,4,1,1,1,2 | |||
734111,1,1,1,3,2,3,1,1,1,2 | |||
734111,1,1,1,1,2,2,1,1,1,2 | |||
780555,5,1,1,6,3,1,2,1,1,2 | |||
827627,2,1,1,1,2,1,1,1,1,2 | |||
1049837,1,1,1,1,2,1,1,1,1,2 | |||
1058849,5,1,1,1,2,1,1,1,1,2 | |||
1182404,1,1,1,1,1,1,1,1,1,2 | |||
1193544,5,7,9,8,6,10,8,10,1,4 | |||
1201870,4,1,1,3,1,1,2,1,1,2 | |||
1202253,5,1,1,1,2,1,1,1,1,2 | |||
1227081,3,1,1,3,2,1,1,1,1,2 | |||
1230994,4,5,5,8,6,10,10,7,1,4 | |||
1238410,2,3,1,1,3,1,1,1,1,2 | |||
1246562,10,2,2,1,2,6,1,1,2,4 | |||
1257470,10,6,5,8,5,10,8,6,1,4 | |||
1259008,8,8,9,6,6,3,10,10,1,4 | |||
1266124,5,1,2,1,2,1,1,1,1,2 | |||
1267898,5,1,3,1,2,1,1,1,1,2 | |||
1268313,5,1,1,3,2,1,1,1,1,2 | |||
1268804,3,1,1,1,2,5,1,1,1,2 | |||
1276091,6,1,1,3,2,1,1,1,1,2 | |||
1280258,4,1,1,1,2,1,1,2,1,2 | |||
1293966,4,1,1,1,2,1,1,1,1,2 | |||
1296572,10,9,8,7,6,4,7,10,3,4 | |||
1298416,10,6,6,2,4,10,9,7,1,4 | |||
1299596,6,6,6,5,4,10,7,6,2,4 | |||
1105524,4,1,1,1,2,1,1,1,1,2 | |||
1181685,1,1,2,1,2,1,2,1,1,2 | |||
1211594,3,1,1,1,1,1,2,1,1,2 | |||
1238777,6,1,1,3,2,1,1,1,1,2 | |||
1257608,6,1,1,1,1,1,1,1,1,2 | |||
1269574,4,1,1,1,2,1,1,1,1,2 | |||
1277145,5,1,1,1,2,1,1,1,1,2 | |||
1287282,3,1,1,1,2,1,1,1,1,2 | |||
1296025,4,1,2,1,2,1,1,1,1,2 | |||
1296263,4,1,1,1,2,1,1,1,1,2 | |||
1296593,5,2,1,1,2,1,1,1,1,2 | |||
1299161,4,8,7,10,4,10,7,5,1,4 | |||
1301945,5,1,1,1,1,1,1,1,1,2 | |||
1302428,5,3,2,4,2,1,1,1,1,2 | |||
1318169,9,10,10,10,10,5,10,10,10,4 | |||
474162,8,7,8,5,5,10,9,10,1,4 | |||
787451,5,1,2,1,2,1,1,1,1,2 | |||
1002025,1,1,1,3,1,3,1,1,1,2 | |||
1070522,3,1,1,1,1,1,2,1,1,2 | |||
1073960,10,10,10,10,6,10,8,1,5,4 | |||
1076352,3,6,4,10,3,3,3,4,1,4 | |||
1084139,6,3,2,1,3,4,4,1,1,4 | |||
1115293,1,1,1,1,2,1,1,1,1,2 | |||
1119189,5,8,9,4,3,10,7,1,1,4 | |||
1133991,4,1,1,1,1,1,2,1,1,2 | |||
1142706,5,10,10,10,6,10,6,5,2,4 | |||
1155967,5,1,2,10,4,5,2,1,1,2 | |||
1170945,3,1,1,1,1,1,2,1,1,2 | |||
1181567,1,1,1,1,1,1,1,1,1,2 | |||
1182404,4,2,1,1,2,1,1,1,1,2 | |||
1204558,4,1,1,1,2,1,2,1,1,2 | |||
1217952,4,1,1,1,2,1,2,1,1,2 | |||
1224565,6,1,1,1,2,1,3,1,1,2 | |||
1238186,4,1,1,1,2,1,2,1,1,2 | |||
1253917,4,1,1,2,2,1,2,1,1,2 | |||
1265899,4,1,1,1,2,1,3,1,1,2 | |||
1268766,1,1,1,1,2,1,1,1,1,2 | |||
1277268,3,3,1,1,2,1,1,1,1,2 | |||
1286943,8,10,10,10,7,5,4,8,7,4 | |||
1295508,1,1,1,1,2,4,1,1,1,2 | |||
1297327,5,1,1,1,2,1,1,1,1,2 | |||
1297522,2,1,1,1,2,1,1,1,1,2 | |||
1298360,1,1,1,1,2,1,1,1,1,2 | |||
1299924,5,1,1,1,2,1,2,1,1,2 | |||
1299994,5,1,1,1,2,1,1,1,1,2 | |||
1304595,3,1,1,1,1,1,2,1,1,2 | |||
1306282,6,6,7,10,3,10,8,10,2,4 | |||
1313325,4,10,4,7,3,10,9,10,1,4 | |||
1320077,1,1,1,1,1,1,1,1,1,2 | |||
1320077,1,1,1,1,1,1,2,1,1,2 | |||
1320304,3,1,2,2,2,1,1,1,1,2 | |||
1330439,4,7,8,3,4,10,9,1,1,4 | |||
333093,1,1,1,1,3,1,1,1,1,2 | |||
369565,4,1,1,1,3,1,1,1,1,2 | |||
412300,10,4,5,4,3,5,7,3,1,4 | |||
672113,7,5,6,10,4,10,5,3,1,4 | |||
749653,3,1,1,1,2,1,2,1,1,2 | |||
769612,3,1,1,2,2,1,1,1,1,2 | |||
769612,4,1,1,1,2,1,1,1,1,2 | |||
798429,4,1,1,1,2,1,3,1,1,2 | |||
807657,6,1,3,2,2,1,1,1,1,2 | |||
8233704,4,1,1,1,1,1,2,1,1,2 | |||
837480,7,4,4,3,4,10,6,9,1,4 | |||
867392,4,2,2,1,2,1,2,1,1,2 | |||
869828,1,1,1,1,1,1,3,1,1,2 | |||
1043068,3,1,1,1,2,1,2,1,1,2 | |||
1056171,2,1,1,1,2,1,2,1,1,2 | |||
1061990,1,1,3,2,2,1,3,1,1,2 | |||
1113061,5,1,1,1,2,1,3,1,1,2 | |||
1116192,5,1,2,1,2,1,3,1,1,2 | |||
1135090,4,1,1,1,2,1,2,1,1,2 | |||
1145420,6,1,1,1,2,1,2,1,1,2 | |||
1158157,5,1,1,1,2,2,2,1,1,2 | |||
1171578,3,1,1,1,2,1,1,1,1,2 | |||
1174841,5,3,1,1,2,1,1,1,1,2 | |||
1184586,4,1,1,1,2,1,2,1,1,2 | |||
1186936,2,1,3,2,2,1,2,1,1,2 | |||
1197527,5,1,1,1,2,1,2,1,1,2 | |||
1222464,6,10,10,10,4,10,7,10,1,4 | |||
1240603,2,1,1,1,1,1,1,1,1,2 | |||
1240603,3,1,1,1,1,1,1,1,1,2 | |||
1241035,7,8,3,7,4,5,7,8,2,4 | |||
1287971,3,1,1,1,2,1,2,1,1,2 | |||
1289391,1,1,1,1,2,1,3,1,1,2 | |||
1299924,3,2,2,2,2,1,4,2,1,2 | |||
1306339,4,4,2,1,2,5,2,1,2,2 | |||
1313658,3,1,1,1,2,1,1,1,1,2 | |||
1313982,4,3,1,1,2,1,4,8,1,2 | |||
1321264,5,2,2,2,1,1,2,1,1,2 | |||
1321321,5,1,1,3,2,1,1,1,1,2 | |||
1321348,2,1,1,1,2,1,2,1,1,2 | |||
1321931,5,1,1,1,2,1,2,1,1,2 | |||
1321942,5,1,1,1,2,1,3,1,1,2 | |||
1321942,5,1,1,1,2,1,3,1,1,2 | |||
1328331,1,1,1,1,2,1,3,1,1,2 | |||
1328755,3,1,1,1,2,1,2,1,1,2 | |||
1331405,4,1,1,1,2,1,3,2,1,2 | |||
1331412,5,7,10,10,5,10,10,10,1,4 | |||
1333104,3,1,2,1,2,1,3,1,1,2 | |||
1334071,4,1,1,1,2,3,2,1,1,2 | |||
1343068,8,4,4,1,6,10,2,5,2,4 | |||
1343374,10,10,8,10,6,5,10,3,1,4 | |||
1344121,8,10,4,4,8,10,8,2,1,4 | |||
142932,7,6,10,5,3,10,9,10,2,4 | |||
183936,3,1,1,1,2,1,2,1,1,2 | |||
324382,1,1,1,1,2,1,2,1,1,2 | |||
378275,10,9,7,3,4,2,7,7,1,4 | |||
385103,5,1,2,1,2,1,3,1,1,2 | |||
690557,5,1,1,1,2,1,2,1,1,2 | |||
695091,1,1,1,1,2,1,2,1,1,2 | |||
695219,1,1,1,1,2,1,2,1,1,2 | |||
824249,1,1,1,1,2,1,3,1,1,2 | |||
871549,5,1,2,1,2,1,2,1,1,2 | |||
878358,5,7,10,6,5,10,7,5,1,4 | |||
1107684,6,10,5,5,4,10,6,10,1,4 | |||
1115762,3,1,1,1,2,1,1,1,1,2 | |||
1217717,5,1,1,6,3,1,1,1,1,2 | |||
1239420,1,1,1,1,2,1,1,1,1,2 | |||
1254538,8,10,10,10,6,10,10,10,1,4 | |||
1261751,5,1,1,1,2,1,2,2,1,2 | |||
1268275,9,8,8,9,6,3,4,1,1,4 | |||
1272166,5,1,1,1,2,1,1,1,1,2 | |||
1294261,4,10,8,5,4,1,10,1,1,4 | |||
1295529,2,5,7,6,4,10,7,6,1,4 | |||
1298484,10,3,4,5,3,10,4,1,1,4 | |||
1311875,5,1,2,1,2,1,1,1,1,2 | |||
1315506,4,8,6,3,4,10,7,1,1,4 | |||
1320141,5,1,1,1,2,1,2,1,1,2 | |||
1325309,4,1,2,1,2,1,2,1,1,2 | |||
1333063,5,1,3,1,2,1,3,1,1,2 | |||
1333495,3,1,1,1,2,1,2,1,1,2 | |||
1334659,5,2,4,1,1,1,1,1,1,2 | |||
1336798,3,1,1,1,2,1,2,1,1,2 | |||
1344449,1,1,1,1,1,1,2,1,1,2 | |||
1350568,4,1,1,1,2,1,2,1,1,2 | |||
1352663,5,4,6,8,4,1,8,10,1,4 | |||
188336,5,3,2,8,5,10,8,1,2,4 | |||
352431,10,5,10,3,5,8,7,8,3,4 | |||
353098,4,1,1,2,2,1,1,1,1,2 | |||
411453,1,1,1,1,2,1,1,1,1,2 | |||
557583,5,10,10,10,10,10,10,1,1,4 | |||
636375,5,1,1,1,2,1,1,1,1,2 | |||
736150,10,4,3,10,3,10,7,1,2,4 | |||
803531,5,10,10,10,5,2,8,5,1,4 | |||
822829,8,10,10,10,6,10,10,10,10,4 | |||
1016634,2,3,1,1,2,1,2,1,1,2 | |||
1031608,2,1,1,1,1,1,2,1,1,2 | |||
1041043,4,1,3,1,2,1,2,1,1,2 | |||
1042252,3,1,1,1,2,1,2,1,1,2 | |||
1057067,1,1,1,1,1,?,1,1,1,2 | |||
1061990,4,1,1,1,2,1,2,1,1,2 | |||
1073836,5,1,1,1,2,1,2,1,1,2 | |||
1083817,3,1,1,1,2,1,2,1,1,2 | |||
1096352,6,3,3,3,3,2,6,1,1,2 | |||
1140597,7,1,2,3,2,1,2,1,1,2 | |||
1149548,1,1,1,1,2,1,1,1,1,2 | |||
1174009,5,1,1,2,1,1,2,1,1,2 | |||
1183596,3,1,3,1,3,4,1,1,1,2 | |||
1190386,4,6,6,5,7,6,7,7,3,4 | |||
1190546,2,1,1,1,2,5,1,1,1,2 | |||
1213273,2,1,1,1,2,1,1,1,1,2 | |||
1218982,4,1,1,1,2,1,1,1,1,2 | |||
1225382,6,2,3,1,2,1,1,1,1,2 | |||
1235807,5,1,1,1,2,1,2,1,1,2 | |||
1238777,1,1,1,1,2,1,1,1,1,2 | |||
1253955,8,7,4,4,5,3,5,10,1,4 | |||
1257366,3,1,1,1,2,1,1,1,1,2 | |||
1260659,3,1,4,1,2,1,1,1,1,2 | |||
1268952,10,10,7,8,7,1,10,10,3,4 | |||
1275807,4,2,4,3,2,2,2,1,1,2 | |||
1277792,4,1,1,1,2,1,1,1,1,2 | |||
1277792,5,1,1,3,2,1,1,1,1,2 | |||
1285722,4,1,1,3,2,1,1,1,1,2 | |||
1288608,3,1,1,1,2,1,2,1,1,2 | |||
1290203,3,1,1,1,2,1,2,1,1,2 | |||
1294413,1,1,1,1,2,1,1,1,1,2 | |||
1299596,2,1,1,1,2,1,1,1,1,2 | |||
1303489,3,1,1,1,2,1,2,1,1,2 | |||
1311033,1,2,2,1,2,1,1,1,1,2 | |||
1311108,1,1,1,3,2,1,1,1,1,2 | |||
1315807,5,10,10,10,10,2,10,10,10,4 | |||
1318671,3,1,1,1,2,1,2,1,1,2 | |||
1319609,3,1,1,2,3,4,1,1,1,2 | |||
1323477,1,2,1,3,2,1,2,1,1,2 | |||
1324572,5,1,1,1,2,1,2,2,1,2 | |||
1324681,4,1,1,1,2,1,2,1,1,2 | |||
1325159,3,1,1,1,2,1,3,1,1,2 | |||
1326892,3,1,1,1,2,1,2,1,1,2 | |||
1330361,5,1,1,1,2,1,2,1,1,2 | |||
1333877,5,4,5,1,8,1,3,6,1,2 | |||
1334015,7,8,8,7,3,10,7,2,3,4 | |||
1334667,1,1,1,1,2,1,1,1,1,2 | |||
1339781,1,1,1,1,2,1,2,1,1,2 | |||
1339781,4,1,1,1,2,1,3,1,1,2 | |||
13454352,1,1,3,1,2,1,2,1,1,2 | |||
1345452,1,1,3,1,2,1,2,1,1,2 | |||
1345593,3,1,1,3,2,1,2,1,1,2 | |||
1347749,1,1,1,1,2,1,1,1,1,2 | |||
1347943,5,2,2,2,2,1,1,1,2,2 | |||
1348851,3,1,1,1,2,1,3,1,1,2 | |||
1350319,5,7,4,1,6,1,7,10,3,4 | |||
1350423,5,10,10,8,5,5,7,10,1,4 | |||
1352848,3,10,7,8,5,8,7,4,1,4 | |||
1353092,3,2,1,2,2,1,3,1,1,2 | |||
1354840,2,1,1,1,2,1,3,1,1,2 | |||
1354840,5,3,2,1,3,1,1,1,1,2 | |||
1355260,1,1,1,1,2,1,2,1,1,2 | |||
1365075,4,1,4,1,2,1,1,1,1,2 | |||
1365328,1,1,2,1,2,1,2,1,1,2 | |||
1368267,5,1,1,1,2,1,1,1,1,2 | |||
1368273,1,1,1,1,2,1,1,1,1,2 | |||
1368882,2,1,1,1,2,1,1,1,1,2 | |||
1369821,10,10,10,10,5,10,10,10,7,4 | |||
1371026,5,10,10,10,4,10,5,6,3,4 | |||
1371920,5,1,1,1,2,1,3,2,1,2 | |||
466906,1,1,1,1,2,1,1,1,1,2 | |||
466906,1,1,1,1,2,1,1,1,1,2 | |||
534555,1,1,1,1,2,1,1,1,1,2 | |||
536708,1,1,1,1,2,1,1,1,1,2 | |||
566346,3,1,1,1,2,1,2,3,1,2 | |||
603148,4,1,1,1,2,1,1,1,1,2 | |||
654546,1,1,1,1,2,1,1,1,8,2 | |||
654546,1,1,1,3,2,1,1,1,1,2 | |||
695091,5,10,10,5,4,5,4,4,1,4 | |||
714039,3,1,1,1,2,1,1,1,1,2 | |||
763235,3,1,1,1,2,1,2,1,2,2 | |||
776715,3,1,1,1,3,2,1,1,1,2 | |||
841769,2,1,1,1,2,1,1,1,1,2 | |||
888820,5,10,10,3,7,3,8,10,2,4 | |||
897471,4,8,6,4,3,4,10,6,1,4 | |||
897471,4,8,8,5,4,5,10,4,1,4 |
@@ -1,15 +0,0 @@ | |||
import os | |||
c = get_config() | |||
# Kernel config | |||
c.IPKernelApp.pylab = 'inline' # if you want plotting support always | |||
# Notebook config | |||
#c.NotebookApp.certfile = os.path.join(os.getcwd(), u'mycert.pem') | |||
#c.NotebookApp.ip = '*' | |||
c.NotebookApp.open_browser = True | |||
#c.NotebookApp.NotebookManager.notebook_dir = os.path.join(os.environ["HOME"], "Desktop", "Tutorial") | |||
#c.NotebookApp.notebook_dir = os.path.join(os.environ["HOME"], "Desktop", "Tutorial") | |||
# It's a good idea to put it on a known, fixed port | |||
c.NotebookApp.port = 9999 | |||
#c.NotebookApp.password = u'sha1:60e7d2645fb4:97064d28bad2a4a12950055c830d9637b652c9ec' |
@@ -1,25 +0,0 @@ | |||
import numpy as np
import matplotlib.pyplot as plt


def plot_decision_boundary(clf, X):
    # Plot the separating line of a fitted linear classifier over the range of the data.
    w = clf.coef_.ravel()
    a = -w[0] / w[1]
    xx = np.linspace(np.min(X[:, 0]), np.max(X[:, 0]))
    yy = a * xx - clf.intercept_ / w[1]
    plt.plot(xx, yy)
    plt.xticks(())
    plt.yticks(())


def plotEllipse(pos, P, edge='k', face='none', line_width=0.1):
    # Draw the ellipse described by the 2x2 matrix P, centred at pos;
    # the axis lengths and orientation come from the SVD of P.
    from numpy.linalg import svd
    from matplotlib.patches import Ellipse
    import math
    from numpy import pi
    from matplotlib.pyplot import gca
    U, s, Vh = svd(P)
    orient = math.atan2(U[1, 0], U[0, 0]) * 180 / pi
    ellipsePlot = Ellipse(xy=pos, width=2.0 * math.sqrt(s[0]), height=2.0 * math.sqrt(s[1]),
                          angle=orient, facecolor=face, edgecolor=edge, lw=line_width)
    ax = gca()
    ax.add_patch(ellipsePlot)
    return ellipsePlot
@@ -11,4 +11,19 @@ MachineLearningNotebooks/08.%20Practical_NeuralNets.ipynb | |||
http://sofasofa.io/competitions.php?type=practice | |||
https://www.kaggle.com/competitions | |||
https://github.com/wmpscc/DataMiningNotesAndPractice/blob/master/2.KMeans%E7%AE%97%E6%B3%95%E4%B8%8E%E4%BA%A4%E9%80%9A%E4%BA%8B%E6%95%85%E7%90%86%E8%B5%94%E5%AE%A1%E6%A0%B8%E9%A2%84%E6%B5%8B.md | |||
Evaluation metrics
http://localhost:8889/notebooks/machineLearning/10_digits_classification.ipynb | |||
Model selection and assessment
http://localhost:8889/notebooks/machineLearning/notebooks/01%20-%20Model%20Selection%20and%20Assessment.ipynb | |||
Programming Multiclass Logistic Regression | |||
http://localhost:8889/notebooks/MachineLearningNotebooks/05.%20Logistic%20Regression.ipynb | |||
Equation for MLP | |||
http://localhost:8889/notebooks/MachineLearningNotebooks/07.%20MLP%20Neural%20Networks.ipynb | |||
Optimization methods | |||
http://localhost:8889/notebooks/MachineLearningNotebooks/06.%20Optimization.ipynb |
@@ -0,0 +1,169 @@ | |||
# -*- coding: utf-8 -*- | |||
# --- | |||
# jupyter: | |||
# jupytext_format_version: '1.2' | |||
# kernelspec: | |||
# display_name: Python 3 | |||
# language: python | |||
# name: python3 | |||
# language_info: | |||
# codemirror_mode: | |||
# name: ipython | |||
# version: 3 | |||
# file_extension: .py | |||
# mimetype: text/x-python | |||
# name: python | |||
# nbconvert_exporter: python | |||
# pygments_lexer: ipython3 | |||
# version: 3.5.2 | |||
# --- | |||
# # Exercise - Traffic Accident Insurance Claim Prediction (交通事故理赔审核预测)
#
#
# Competition link: http://sofasofa.io/competition.php?id=2
#
#
# * Task type: binary classification
#
# * Background: after a traffic accident, a claims adjuster visits the scene to collect information, and this information largely determines whether the car owner receives compensation from the insurance company. The training data contain 36 pieces of (already encoded) information collected at the scene for each party involved, together with whether that party was ultimately compensated. Our task is to predict, from these 36 features, the probability that a party is not compensated.
#
# * Data: the training set contains 200,000 samples and the test set contains 80,000 samples.
#  
#
# * Evaluation metric: Precision-Recall AUC
# | |||
# ## Demo code | |||
# | |||
import pandas as pd | |||
import numpy as np | |||
import os | |||
import matplotlib.pyplot as plt | |||
# %matplotlib inline | |||
# read data | |||
homePath = "data" | |||
trainPath = os.path.join(homePath, "train.csv") | |||
testPath = os.path.join(homePath, "test.csv") | |||
submitPath = os.path.join(homePath, "sample_submit.csv") | |||
trainData = pd.read_csv(trainPath) | |||
testData = pd.read_csv(testPath) | |||
submitData = pd.read_csv(submitPath) | |||
# According to the data description, the CaseId column is just an identifier with no predictive meaning, so we drop it here.
#
# ~drop(): axis selects which axis to drop along (0 = rows, 1 = columns); inplace controls whether the operation modifies the data in place.
#
# Drop the meaningless column
trainData.drop("CaseId", axis=1, inplace=True) | |||
testData.drop("CaseId", axis=1, inplace=True) | |||
# # A quick look at the data
#
# ~head(): displays the first 5 rows by default; pass a number, e.g. .head(15), to display the first 15 rows.
# | |||
trainData.head(15) | |||
# Display a brief summary of the data: how many non-null values each column has, and each column's data type.
# | |||
# | |||
trainData.info() | |||
# ~hist(): plots a histogram for each column; the figsize parameter sets the size of the output figure.
# | |||
trainData.hist(figsize=(20, 20)) | |||
# To examine the correlations between features, compute the correlation matrix and then sort it by a particular feature.
# | |||
# | |||
corr_matrix = trainData.corr() | |||
corr_matrix["Evaluation"].sort_values(ascending=False) # ascending=False 降序排列 | |||
# Separate the label from the training set
y = trainData['Evaluation'] | |||
trainData.drop("Evaluation", axis=1, inplace=True) | |||
# Train a model with K-Means.
#
# KMeans():
# * `n_clusters`: the number of clusters to form;
# * `init`: the centroid initialization method; the default is `k-means++` rather than the classic random-sampling initialization of K-means, but you can set it to `random` to use random initialization;
# * `n_jobs`: the number of CPU cores to use; -1 uses all cores.
# + | |||
# do k-means | |||
from sklearn.cluster import KMeans | |||
est = KMeans(n_clusters=2, init="k-means++", n_jobs=-1) | |||
est.fit(trainData, y) | |||
y_train = est.predict(trainData) | |||
y_pred = est.predict(testData) | |||
# Save the predictions
submitData['Evaluation'] = y_pred | |||
submitData.to_csv("submit_data.csv", index=False) | |||
# + | |||
# calculate accuracy | |||
from sklearn.metrics import accuracy_score | |||
acc_train = accuracy_score(y, y_train) | |||
print("acc_train = %f" % (acc_train)) | |||
# - | |||
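
# Note that K-Means assigns arbitrary cluster ids, so the accuracy above can come out close to
# `1 - true accuracy` when cluster 0 happens to correspond to the positive class. A small check
# (added here, not part of the original demo) is to score the flipped labelling as well and keep
# the better of the two:

# +
acc_flipped = accuracy_score(y, 1 - y_train)
print("acc_train (best label assignment) = %f" % max(acc_train, acc_flipped))
# -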
# ## Random forest
#
# The results obtained with K-means may not be that good. On the competition site the organizers provide two benchmark models, of which the better-performing one is a random forest. The code is given below; readers can try it out themselves.
# | |||
# | |||
# + | |||
import pandas as pd | |||
from sklearn.ensemble import RandomForestClassifier | |||
from sklearn.metrics import accuracy_score | |||
# Read the data
train = pd.read_csv("data/train.csv") | |||
test = pd.read_csv("data/test.csv") | |||
submit = pd.read_csv("data/sample_submit.csv") | |||
# Drop the id column
train.drop('CaseId', axis=1, inplace=True) | |||
test.drop('CaseId', axis=1, inplace=True) | |||
# Extract the training labels y
y_train = train.pop('Evaluation') | |||
# Build the random forest model
clf = RandomForestClassifier(n_estimators=100, random_state=0) | |||
clf.fit(train, y_train) | |||
y_pred = clf.predict_proba(test)[:, 1] | |||
# Write the predictions to my_RF_prediction.csv
submit['Evaluation'] = y_pred | |||
submit.to_csv('my_RF_prediction.csv', index=False) | |||
# + | |||
# feature importances
print(clf.feature_importances_) | |||
# Train accuracy | |||
from sklearn.metrics import accuracy_score | |||
y_train_pred = clf.predict(train) | |||
print(y_train_pred) | |||
acc_train = accuracy_score(y_train, y_train_pred) | |||
print("acc_train = %f" % (acc_train)) |
@@ -0,0 +1,71 @@ | |||
{ | |||
"cells": [ | |||
{ | |||
"cell_type": "markdown", | |||
"metadata": {}, | |||
"source": [ | |||
"# Titanic\n", | |||
"\n", | |||
"## Competition Description\n", | |||
"The sinking of the RMS Titanic is one of the most infamous shipwrecks in history. On April 15, 1912, during her maiden voyage, the Titanic sank after colliding with an iceberg, killing 1502 out of 2224 passengers and crew. This sensational tragedy shocked the international community and led to better safety regulations for ships.\n", | |||
"\n", | |||
"One of the reasons that the shipwreck led to such loss of life was that there were not enough lifeboats for the passengers and crew. Although there was some element of luck involved in surviving the sinking, some groups of people were more likely to survive than others, such as women, children, and the upper-class.\n", | |||
"\n", | |||
"In this challenge, we ask you to complete the analysis of what sorts of people were likely to survive. In particular, we ask you to apply the tools of machine learning to predict which passengers survived the tragedy.\n", | |||
"\n", | |||
"## Practice Skills\n", | |||
"* Binary classification\n", | |||
"* Python & SKLearn\n", | |||
"\n", | |||
"## Data\n", | |||
"The data has been split into two groups:\n", | |||
"\n", | |||
"* training set (train.csv)\n", | |||
"* test set (test.csv)\n", | |||
"\n", | |||
"The training set should be used to build your machine learning models. For the training set, we provide the outcome (also known as the `ground truth`) for each passenger. Your model will be based on `features` like passengers' gender and class. You can also use feature engineering to create new features.\n", | |||
"\n", | |||
"The test set should be used to see how well your model performs on unseen data. For the test set, we do not provide the ground truth for each passenger. It is your job to predict these outcomes. For each passenger in the test set, use the model you trained to predict whether or not they survived the sinking of the Titanic.\n", | |||
"\n", | |||
"We also include `gender_submission.csv`, a set of predictions that assume all and only female passengers survive, as an example of what a submission file should look like.\n", | |||
"\n", | |||
"### Data description\n", | |||
"\n", | |||
"\n", | |||
"\n", | |||
"\n", | |||
"### Variable Notes\n", | |||
"pclass: A proxy for socio-economic status (SES)\n", | |||
"* 1st = Upper\n", | |||
"* 2nd = Middle\n", | |||
"* 3rd = Lower\n", | |||
"\n", | |||
"\n", | |||
"## Links\n", | |||
"* [Titanic: Machine Learning from Disaster](https://www.kaggle.com/c/titanic)" | |||
] | |||
} | |||
], | |||
"metadata": { | |||
"kernelspec": { | |||
"display_name": "Python 3", | |||
"language": "python", | |||
"name": "python3" | |||
}, | |||
"language_info": { | |||
"codemirror_mode": { | |||
"name": "ipython", | |||
"version": 3 | |||
}, | |||
"file_extension": ".py", | |||
"mimetype": "text/x-python", | |||
"name": "python", | |||
"nbconvert_exporter": "python", | |||
"pygments_lexer": "ipython3", | |||
"version": "3.5.2" | |||
}, | |||
"main_language": "python" | |||
}, | |||
"nbformat": 4, | |||
"nbformat_minor": 2 | |||
} |
@@ -0,0 +1,58 @@ | |||
# --- | |||
# jupyter: | |||
# jupytext_format_version: '1.2' | |||
# kernelspec: | |||
# display_name: Python 3 | |||
# language: python | |||
# name: python3 | |||
# language_info: | |||
# codemirror_mode: | |||
# name: ipython | |||
# version: 3 | |||
# file_extension: .py | |||
# mimetype: text/x-python | |||
# name: python | |||
# nbconvert_exporter: python | |||
# pygments_lexer: ipython3 | |||
# version: 3.5.2 | |||
# --- | |||
# # Titanic | |||
# | |||
# ## Competition Description | |||
# The sinking of the RMS Titanic is one of the most infamous shipwrecks in history. On April 15, 1912, during her maiden voyage, the Titanic sank after colliding with an iceberg, killing 1502 out of 2224 passengers and crew. This sensational tragedy shocked the international community and led to better safety regulations for ships. | |||
# | |||
# One of the reasons that the shipwreck led to such loss of life was that there were not enough lifeboats for the passengers and crew. Although there was some element of luck involved in surviving the sinking, some groups of people were more likely to survive than others, such as women, children, and the upper-class. | |||
# | |||
# In this challenge, we ask you to complete the analysis of what sorts of people were likely to survive. In particular, we ask you to apply the tools of machine learning to predict which passengers survived the tragedy. | |||
# | |||
# ## Practice Skills | |||
# * Binary classification | |||
# * Python & SKLearn | |||
# | |||
# ## Data | |||
# The data has been split into two groups: | |||
# | |||
# * training set (train.csv) | |||
# * test set (test.csv) | |||
# | |||
# The training set should be used to build your machine learning models. For the training set, we provide the outcome (also known as the `ground truth`) for each passenger. Your model will be based on `features` like passengers' gender and class. You can also use feature engineering to create new features. | |||
# | |||
# The test set should be used to see how well your model performs on unseen data. For the test set, we do not provide the ground truth for each passenger. It is your job to predict these outcomes. For each passenger in the test set, use the model you trained to predict whether or not they survived the sinking of the Titanic. | |||
# | |||
# We also include `gender_submission.csv`, a set of predictions that assume all and only female passengers survive, as an example of what a submission file should look like. | |||
# | |||
# ### Data description | |||
#  | |||
#  | |||
# | |||
# | |||
# ### Variable Notes | |||
# pclass: A proxy for socio-economic status (SES) | |||
# * 1st = Upper | |||
# * 2nd = Middle | |||
# * 3rd = Lower | |||
# | |||
# | |||
# ## Links | |||
# * [Titanic: Machine Learning from Disaster](https://www.kaggle.com/c/titanic) |
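
# ## A minimal baseline sketch
#
# The description above mentions `gender_submission.csv`, which predicts that all and only female
# passengers survive. The following is a small illustrative sketch (an addition, not part of the
# competition description) that reproduces that baseline; it assumes `test.csv` has been
# downloaded into a local `data/` directory and contains the usual `PassengerId` and `Sex` columns.

import pandas as pd

# Assumed local path to the Kaggle test set
test = pd.read_csv("data/test.csv")

# Predict survival for female passengers only, mirroring gender_submission.csv
submission = pd.DataFrame({
    "PassengerId": test["PassengerId"],
    "Survived": (test["Sex"] == "female").astype(int),
})
submission.to_csv("my_gender_baseline.csv", index=False)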
@@ -0,0 +1,98 @@ | |||
{ | |||
"cells": [ | |||
{ | |||
"cell_type": "markdown", | |||
"metadata": {}, | |||
"source": [ | |||
"# 混淆矩阵(confusion matrix)\n", | |||
"\n", | |||
"混淆矩阵是用来总结一个分类器结果的矩阵。对于k元分类,其实它就是一个$k \\times k$的表格,用来记录分类器的预测结果。\n", | |||
"\n", | |||
"对于最常见的二元分类来说,它的混淆矩阵是2乘2的,如下\n", | |||
"\n", | |||
"\n", | |||
"* `TP` = True Postive = 真阳性\n", | |||
"* `FP` = False Positive = 假阳性\n", | |||
"* `FN` = False Negative = 假阴性\n", | |||
"* `TN` = True Negative = 真阴性\n", | |||
"\n", | |||
"你要的例子来了。。。比如我们一个模型对15个样本进行预测,然后结果如下。\n", | |||
"\n", | |||
"* 预测值:1 1 1 1 1 0 0 0 0 0 1 1 1 0 1\n", | |||
"* 真实值:0 1 1 0 1 1 0 0 1 0 1 0 1 0 0\n", | |||
"\n", | |||
"\n", | |||
"\n", | |||
"\n", | |||
"这个就是混淆矩阵。混淆矩阵中的这四个数值,经常被用来定义其他一些度量。\n", | |||
"\n", | |||
"\n", | |||
"### 准确度\n", | |||
"```\n", | |||
"Accuracy = (TP+TN) / (TP+TN+FN+TN)\n", | |||
"```\n", | |||
"\n", | |||
"在上面的例子中,准确度 = (5+4) / 15 = 0.6\n", | |||
"\n", | |||
"\n", | |||
"\n", | |||
"### 精度(precision, 或者PPV, positive predictive value) \n", | |||
"```\n", | |||
"precision = TP / (TP + FP)\n", | |||
"```\n", | |||
"在上面的例子中,精度 = 5 / (5+4) = 0.556\n", | |||
"\n", | |||
"\n", | |||
"\n", | |||
"### 召回(recall, 或者敏感度,sensitivity,真阳性率,TPR,True Positive Rate) \n", | |||
"\n", | |||
"```\n", | |||
"recall = TP / (TP + FN)\n", | |||
"```\n", | |||
"\n", | |||
"在上面的例子中,召回 = 5 / (5+2) = 0.714\n", | |||
"\n", | |||
"\n", | |||
"\n", | |||
"### 特异度(specificity,或者真阴性率,TNR,True Negative Rate)\n", | |||
"```\n", | |||
"specificity = TN / (TN + FP)\n", | |||
"```\n", | |||
"\n", | |||
"在上面的例子中,特异度 = 4 / (4+2) = 0.667\n", | |||
"\n", | |||
"\n", | |||
"\n", | |||
"### F1-值(F1-score) \n", | |||
"```\n", | |||
"F1 = 2*TP / (2*TP+FP+FN) \n", | |||
"```\n", | |||
"在上面的例子中,F1-值 = 2*5 / (2*5+4+2) = 0.625\n", | |||
"\n", | |||
"\n" | |||
] | |||
} | |||
], | |||
"metadata": { | |||
"kernelspec": { | |||
"display_name": "Python 3", | |||
"language": "python", | |||
"name": "python3" | |||
}, | |||
"language_info": { | |||
"codemirror_mode": { | |||
"name": "ipython", | |||
"version": 3 | |||
}, | |||
"file_extension": ".py", | |||
"mimetype": "text/x-python", | |||
"name": "python", | |||
"nbconvert_exporter": "python", | |||
"pygments_lexer": "ipython3", | |||
"version": "3.5.2" | |||
}, | |||
"main_language": "python" | |||
}, | |||
"nbformat": 4, | |||
"nbformat_minor": 2 | |||
} |
@@ -0,0 +1,87 @@ | |||
# -*- coding: utf-8 -*- | |||
# --- | |||
# jupyter: | |||
# jupytext_format_version: '1.2' | |||
# kernelspec: | |||
# display_name: Python 3 | |||
# language: python | |||
# name: python3 | |||
# language_info: | |||
# codemirror_mode: | |||
# name: ipython | |||
# version: 3 | |||
# file_extension: .py | |||
# mimetype: text/x-python | |||
# name: python | |||
# nbconvert_exporter: python | |||
# pygments_lexer: ipython3 | |||
# version: 3.5.2 | |||
# --- | |||
# # Confusion matrix
#
# A confusion matrix summarizes the results of a classifier. For k-class classification it is simply a $k \times k$ table recording the classifier's predictions against the true labels.
#
# For the most common case, binary classification, the confusion matrix is 2 by 2, as follows
#  
#
# * `TP` = True Positive
# * `FP` = False Positive
# * `FN` = False Negative
# * `TN` = True Negative
#
# Here is an example. Suppose a model makes predictions for 15 samples, with the results below.
#
# * Predicted: 1 1 1 1 1 0 0 0 0 0 1 1 1 0 1
# * Actual:    0 1 1 0 1 1 0 0 1 0 1 0 1 0 0
#
#  
#
#
# This is the confusion matrix. Its four counts are frequently used to define a number of other metrics.
#
#
# ### Accuracy
# ```
# Accuracy = (TP+TN) / (TP+TN+FP+FN)
# ```
#
# In the example above, accuracy = (5+4) / 15 = 0.6
#
#
#
# ### Precision (also called PPV, positive predictive value)
# ```
# precision = TP / (TP + FP)
# ```
# In the example above, precision = 5 / (5+4) = 0.556
#
#
#
# ### Recall (also called sensitivity, or TPR, True Positive Rate)
#
# ```
# recall = TP / (TP + FN)
# ```
#
# In the example above, recall = 5 / (5+2) = 0.714
#
#
#
# ### Specificity (also called TNR, True Negative Rate)
# ```
# specificity = TN / (TN + FP)
# ```
#
# In the example above, specificity = 4 / (4+4) = 0.5
#
#
#
# ### F1-score
# ```
# F1 = 2*TP / (2*TP+FP+FN)
# ```
# In the example above, F1-score = 2*5 / (2*5+4+2) = 0.625
# | |||
# | |||
# |
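
# ### Checking the example with scikit-learn
#
# The following is a small sketch (an addition to this note, not part of the original) that
# reproduces the counts and metrics above with `sklearn.metrics`, using the predicted and true
# labels from the 15-sample example.

from sklearn.metrics import confusion_matrix, accuracy_score, precision_score, recall_score, f1_score

y_pred = [1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1]
y_true = [0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0]

# For labels (0, 1), confusion_matrix returns [[TN, FP], [FN, TP]]
tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
print("TP=%d FP=%d FN=%d TN=%d" % (tp, fp, fn, tn))            # TP=5 FP=4 FN=2 TN=4

print("accuracy    = %.3f" % accuracy_score(y_true, y_pred))   # 0.600
print("precision   = %.3f" % precision_score(y_true, y_pred))  # 0.556
print("recall      = %.3f" % recall_score(y_true, y_pred))     # 0.714
print("specificity = %.3f" % (tn / (tn + fp)))                 # 0.500
print("F1          = %.3f" % f1_score(y_true, y_pred))         # 0.625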