Reinventing the wheel – a super-minimal, inference-only implementation of a fully connected neural network in Python + NumPy

There are many neural network frameworks available, but to understand how things work, internal reimplementation can be a good exercise.

In this case, the goal is to develop a super-minimal neural network both in terms of dependencies and characteristics in the Python language.

In terms of dependencies, let's just use NumPy

In terms of features, for now, let's just focus on the inference (without training, basically assume that the weights are precalculated) and in a fully connected 3-layer architecture (input layer, latent layer, and output layer)

The proposed solution consists of a class with
– constructor, where it is possible to specify the number of neurons and the transfer function
– The direct method that makes the inference.





import math

import numpy as np

# The transfer function is the standard logistic sigmoid.
def sigmoid(x):
    """Return 1 / (1 + e^-x) for a scalar x.

    The original was garbled by translation ("sigmoid def (x):",
    "returns", and "import mathematics" for the math module).
    """
    return 1 / (1 + math.exp(-x))


# Only the shapes of the input/output vectors matter to the network
# architecture: they determine how many weights are needed.
x = np.random.rand(1, 4)
y = np.random.rand(1, 1)


class NN:
    """Minimal 3-layer fully connected network (input, hidden, output)
    for inference only; weights are random stand-ins for precomputed
    values.

    The original was translation-garbled ("NN class:", "__init __",
    "auto" for "self") and sized the output weights with y.shape[0].
    """

    def __init__(self, N, tf, x, y):
        # Number of hidden-layer neurons.
        self.N = N

        # Transfer (activation) function, vectorized so it can be
        # applied element-wise to weight products.
        self.tf = np.vectorize(tf)

        # Weights: input->hidden and hidden->output.
        self.weights_in = np.random.rand(x.shape[1], self.N)
        # BUG FIX: the hidden->output matrix must have one column per
        # output feature, i.e. y.shape[1] (the original used
        # y.shape[0], which only worked for the 1x1 example).
        self.weights_out = np.random.rand(self.N, y.shape[1])

    def forward(self, x):
        """Run one inference pass: input -> hidden -> output."""
        self.layer1 = self.tf(np.dot(x, self.weights_in))
        return self.tf(np.dot(self.layer1, self.weights_out))

    def to_str(self):
        """Return a printable dump of both weight matrices."""
        return ("Input Weights\n" + np.array2string(self.weights_in)
                + "\nOutput Weights\n" + np.array2string(self.weights_out))


# Test


# Smoke test: build a 5-neuron network and run one forward pass on the
# random sample input defined above.
temp = NN (5, sigmoid, x, y)

temp.forward (x)

Output (it is a random number):

array([[0.84957092]])

Python – The Bokeh application behaves strangely.

Below is the code for a work fragment.

This is what is strange about behavior:

  • Touching the button frequently freezes the application.
  • The lower right group never contributes to the histogram when it is selected.

We appreciate any comments about the code.

import numpy as np
import pandas as pd

from bokeh.layouts import gridplot
from bokeh.models import BoxSelectTool, LassoSelectTool
from bokeh.plotting import figure, curdoc
from bokeh.io import output_file, show
from bokeh.models.widgets import Button
from bokeh.models import ColumnDataSource
from bokeh.models import DataRange1d

# The translation mangled these imports ("pandas like pd",
# "of bokeh.ploting", lowercase "button"); the widget class is Button.
button = Button(label="Plot Points and remake histogram", button_type="success")

def generate():
    """Populate the module-level x and y arrays with three
    concatenated normal samples of random sizes.

    Renamed from the garbled "generated" so the call sites below
    ("generate ()") actually resolve; the globals were also garbled
    ("x global", "global and").
    """
    global x
    global y

    # Random sample sizes between 1 and 999.
    size1 = np.random.randint(1, 1000, size=1)[0]
    size2 = np.random.randint(1, 1000, size=1)[0]
    size3 = np.random.randint(1, 1000, size=1)[0]

    # Three normal populations with different parameters.
    x1 = np.random.normal(loc=5.0, size=size1) * 100; y1 = np.random.normal(loc=10.0, size=size1) * 10
    x2 = np.random.normal(loc=5.0, size=size2) * 50;  y2 = np.random.normal(loc=5.0, size=size2) * 10
    x3 = np.random.normal(loc=55.0, size=size3) * 10; y3 = np.random.normal(loc=4.0, size=size3) * 10

    x = np.concatenate((x1, x2, x3))
    y = np.concatenate((y1, y2, y3))


# Build the initial data set (module-level x and y).
generate()


TOOLS = "pan, wheel_zoom, box_select, lasso_select, reset"

p = figure(tools=TOOLS, plot_width=700, plot_height=600, min_border=10, min_border_left=50,
           toolbar_location="above", x_axis_location=None, y_axis_location=None,
           title="Dispersion diagram")

# The original had HTML-entity-garbled quotes and the keyword "font="
# where Bokeh expects "source=".
pointchart_source = ColumnDataSource(pd.DataFrame({'x': x, 'y': y}))
r = p.scatter(x='x', y='y', size=3, color="#3A5785", alpha=0.6, source=pointchart_source)


# Create the base histogram over all x values.
hhist, hedges = np.histogram(x, bins=10)  # set the bin count here
# Module-level alias: the selection callback re-bins against these
# edges, and the original never defined white_hist_edges before the
# button was clicked at least once.
white_hist_edges = hedges
hzeros = np.zeros(len(hedges) - 1)
hmax = max(hhist) * 1.1

LINE_ARGS = dict(color="#3A5785", line_color=None)

# Garbles fixed: "and_axis_location" -> y_axis_location, "above=" ->
# top=, "lower=" -> bottom=, "coverages" -> hedges.
ph = figure(toolbar_location=None, plot_width=600, plot_height=300, x_range=p.x_range,
            y_range=(-hmax, hmax), min_border=10, min_border_left=50, y_axis_location="right")

white_hist_glyph = ph.quad(bottom=0, left=hedges[:-1], right=hedges[1:], top=hhist, color="white", line_color="#3A5785")

blue_hist_glyph = ph.quad(bottom=0, left=hedges[:-1], right=hedges[1:], top=hzeros, alpha=0.5, **LINE_ARGS)


bins_count = 20

def on_selection_callback(attr, old, new):
    """Recompute the blue (selection) histogram whenever the scatter
    selection changes.

    Fixes two defects in the original:
    * it indexed the already-selected subset a second time
      (x[inds] with inds == new), so most selections produced an
      empty/wrong histogram — the lower-right cluster never appeared;
    * it binned against white_hist_edges, which was only defined after
      the button had been clicked once (NameError before that).
    """
    selected_x = pd.DataFrame(pointchart_source.data).loc[new]['x']
    try:
        edges = white_hist_edges   # updated by the button callback
    except NameError:
        edges = hedges             # initial module-level bin edges
    blue_hist_values, _ = np.histogram(selected_x, bins=edges)
    blue_hist_glyph.data_source.data["top"] = blue_hist_values


# Redraw the selection histogram whenever the scatter selection
# changes; the property name is 'indices' (the original had the
# entity-garbled, and wrong, 'indexes').
r.data_source.selected.on_change('indices', on_selection_callback)

def new_source_callback():
    """Button handler: draw a new random data set and rebuild both
    histograms.

    Fixes for the reported misbehavior:
    * assign a plain dict to pointchart_source.data instead of a
      freshly constructed ColumnDataSource's .data (the original
      pattern was marked "fails" and confuses Bokeh's change
      detection);
    * do NOT reassign ph.x_range to a new object on every click —
      replacing range models after the layout is built is what froze
      the app; mutate the existing y_range instead.
    """
    global white_hist_edges

    # Build new global x and y arrays.
    generate()

    # Replace the plotted points in place.
    pointchart_source.data = {'x': x, 'y': y}

    allvalues = pd.DataFrame(pointchart_source.data)['x']
    white_hist_values, white_hist_edges = np.histogram(allvalues, bins=bins_count)
    white_hist_glyph.data_source.data["top"] = white_hist_values

    # Reset the selection histogram to empty.
    blue_hist_glyph.data_source.data["top"] = np.zeros(len(white_hist_edges) - 1)

    # Rescale the existing range object rather than replacing it.
    ph.y_range.start = -max(white_hist_values) * 1.1
    ph.y_range.end = max(white_hist_values) * 1.1


# Regenerate the data and histograms on every button click.
button.on_click (new_source_callback)


# Assemble the document: scatter plot, histogram, and refresh button.
layout = gridplot([[p, ph, button]], merge_tools=False)
doc = curdoc()
doc.add_root(layout)
doc.title = "Selection Histogram"


! powershell -command {'bokeh serve --show Histogram_test_v5.ipynb'}

deployment – Implement Python ML Model?

I want to automate / prototype the training of my classification model using sklearn with a "1 click execution" from the terminal where:

  1. connect to the source system
  2. read in untagged data
  3. perform preprocessing steps
  4. Train model of pipeline in train / test set
  5. see classification report
  6. export "pickle" model
  7. load persisted model
  8. read in untagged data
  9. apply predictions / probabilities in untagged data
  10. write the predicted results to csv / db

I have all the code mostly complete for this, but I wanted to divide these steps into sensible Python modules so that it is not just one long script. I have read about Anaconda project deployments and about "__init__" and "__main__" Python files, but as a data scientist I find it confusing. Can anyone simplify this process, or recommend material, so I can get started and understand it better?

Thank you!

python 3.x – Dúvida em "continue" (SyntaxError: & # 39; continue & # 39; is not correctly in the loop)

I am new to Python and this code does not work. Why does it not work? Could you help me?

def SecretNumber():
    """Ask for two numbers between 1 and 10 and print their product.

    The original raised "SyntaxError: 'continue' not properly in loop"
    because the if/else validation blocks were indented OUTSIDE the
    while loop; everything below the two input() calls must stay inside
    the loop body.  The 'continue' is kept for fidelity, although it is
    redundant right before the loop condition is re-checked.
    """
    GotIt = False
    while GotIt == False:
        One = int(input("Enter a number between 1 and 10: "))
        Two = int(input("Enter another number between 1 and 10: "))

        if (One >= 1) and (One <= 10):
            if (Two >= 1) and (Two <= 10):
                print('Your secret number is: ' + str(One * Two))
                GotIt = True
                continue
            else:
                print('Second value wrong!')
        else:
            print("First incorrect value!")
        print("Try again!")

***** File "cell_name", line 14
SyntaxError: 'continue' not properly in loop *****

python – Divide a long line of json file into multiple lines

I'm trying to figure out how to split a JSON file from one line into a multiliner, after each comma.

I've tried using the division method, but it's not yet the output I'm looking for. This is how far I have come so far.

# NOTE(review): "res" must be an open HTTP response object from earlier
# code that is not shown here — confirm against the caller.
with open('api.txt', 'w+') as f:
    api = res.read().decode('utf-8')
    f.write(api)

# BUG FIX: str.split(",") returns a list and file.write() only accepts
# a string, so the original raised TypeError.  Join the fields with
# newlines to get one field per line, as in the desired output.
with open('new_api.txt', 'w+') as n:
    n.write("\n".join(api.split(",")))

I would like a result like this:

line1 "language": null

line2 "has_issues": true
line3 "has_projects": true
line4 "has_downloads": true
line5 "has_wiki": true
line6 "has_pages": false

mojave – Can not install python mysqlclient – OSError: mysql_config not found

I installed mysql through XAMPP.

When I try pip install mysqlclient I get this error

MBP-of-admin: django_project aanto $ pip install mysqlclient
Collecting mysqlclient
Using https://files.pythonhosted.org/packages/f4/f1/3bb6f64ca7a429729413e6556b7ba5976df06019a5245a43d36032f1061e/mysqlclient-1.4.2.post1.tar.gz
Complete output from command python setup.py egg_info:
/bin/sh: mysql_config: command not found
Traceback (most recent call last):
  File "", line 1, in 
      File "/private/var/folders/jw/4xk541g14mq3ljnws61fbt_m0000gn/T/pip-install-0u41r1pb/mysqlclient/setup.py", line 16, in 
        metadata, options = get_config ()
  File "/private/var/folders/jw/4xk541g14mq3ljnws61fbt_m0000gn/T/pip-install-0u41r1pb/mysqlclient/setup_posix.py", line 51, in get_config
    libs = mysql_config ("libs")
  File "/private/var/folders/jw/4xk541g14mq3ljnws61fbt_m0000gn/T/pip-install-0u41r1pb/mysqlclient/setup_posix.py", line 29, in mysql_config
    raise EnvironmentError ("%s not found" % (_mysql_config_path,))
OSError: mysql_config not found

----------------------------------------
The command "python setup.py egg_info" failed with error code 1 in / private / var / folder / jw / 4xk541g14mq3ljnws61fbt_m0000gn / T / pip-install-0u41r1pb / mysqlclient /

How can I solve?

File with Python and NLTK

I am a beginner in Python and I am working with natural language processing. When processing a text and separating it into sentences, I want to write them to a file in this format: << SENTENÇA >> << SENTENÇA >> …
But I do not know how to do it, I know how to print on the screen, but not how to write to the file in that format.

import nltk

import nltk.data


# Output file: one "<< sentence >>" marker per line, as requested.
out_file = open('base_setenciado.txt', 'w')

raw_text = open('base.txt').read()

# The original garbled this assignment ("sent_tokenizer nltk.data.load
# = (...)") and put spaces inside the resource path.
sent_tokenizer = nltk.data.load('tokenizers/punkt/portuguese.pickle')

sentences = sent_tokenizer.tokenize(raw_text)

# Write the selected sentence range to the file instead of printing
# to the screen (the question asks how to get this format on disk).
for sent in sentences[500:505]:
    out_file.write("<< " + sent + " >>\n")

out_file.close()

python – how to install the fluentd-logger package using poetry

my env

Python: 3.6.7
poetry: 1.0.0a2

reproducing

% poetry add fluent-logger
Using the version ^ 0.9.3 for fluent-logger
Updating dependencies
Resolving dependencies ... (0.0s)                                                                                                                                              

[SolverProblemError]
The current project must be compatible with the following versions of Python: ^ 3.6

Because no version of fluent-logger matches >0.9.3,<0.10.0
  and fluent-logger (0.9.3) requires Python >=2.7,!=3.0,!=3.1,!=3.2,!=3.3,<3.8, fluent-logger is forbidden.

So, because fluentd-integration depends on fluent-logger (^0.9.3), version solving failed.

my pyproject.toml

% cat pyproject.toml
[tool.poetry.dependencies]
python = "^3.6"

[tool.poetry.dev-dependencies]

[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"

I have checked fluent-logger's setup.py: python_requires=">=2.7,!=3.0,!=3.1,!=3.2,!=3.3,<3.8". Seems fine.

python – ALGORITHM COMPLEXITY – Stack Overflow in Spanish

Hello, I have this program that calculates the EMIRP numbers and I am learning to calculate the algorithmic complexity but I am a bit lost, this is the code

import time
# Wall-clock start; the elapsed time is printed at the end of the script.
start = time.time ()

def prime_number(num, c_p=0):
    """Return 1 when num has exactly two divisors (i.e. is prime),
    otherwise 0.  c_p is the starting divisor count (normally 0)."""
    divisor = 1
    while divisor <= num:
        if num % divisor == 0:
            c_p += 1
            if c_p > 2:
                # More than two divisors: composite, stop early.
                return 0
        divisor += 1
    return 1 if c_p == 2 else 0

def change(N):
    """Return 1 when the digit-reversed form of N (given as a string)
    is a different number and is prime; otherwise return 0."""
    if len(N) < 2:
        return 0
    reversed_value = int(N[::-1])
    if reversed_value != int(N) and prime_number(reversed_value) == 1:
        return 1
    return 0

# All primes up to 1000.
LP = [i for i in range(1, 1001) if prime_number(i) == 1]
# Emirps: primes whose reversal is a different prime.  The original
# built NLP as a comprehension of print() calls, i.e. a throwaway list
# of None values; keep the numbers and print them with a plain loop.
NLP = [i for i in LP if change(str(i)) == 1]
for emirp in NLP:
    print(emirp)
end = time.time()
print(end - start)

How do I determine the algorithmic complexity? I do not know whether the correct answer is O(ABC).

Alibaba Cloud – Aliyun Python CreateInstance – can not be created with the Internet IP address

I have installed these 3 Python packages:

  • aliyun-python-sdk-core
  • aliyun-python-sdk-ecs
  • aliyun-python-sdk-vpc

I attempt CreateInstance with an external (Internet) IP address, but every time I only get a private IP address. How do I create an instance with an external Internet IP address? When I run 'DescribeInstances', I get 'VpcAttributes' in the response, but how do I set 'VpcAttributes' in CreateInstance, since there is no such setter in the SDK's CreateInstanceRequest?

Script:

import json
from aliyunsdkcore.client import AcsClient
from aliyunsdkcore.acs_exception.exceptions import ClientException
from aliyunsdkcore.acs_exception.exceptions import ServerException
from aliyunsdkecs.request.v20140526.DescribeInstancesRequest import DescribeInstancesRequest
from aliyunsdkecs.request.v20140526.DescribeUserDataRequest import DescribeUserDataRequest
from aliyunsdkecs.request.v20140526.CreateInstanceRequest import CreateInstanceRequest
from aliyunsdkecs.request.v20140526.StartInstanceRequest import StartInstanceRequest

# Create an AcsClient instance.
# NOTE(review): the credentials are hard-coded; load them from the
# environment or a credentials file instead of committing them.
client = AcsClient(
    "LTblahaccesskey",   # your-access-key-id
    "bGkeysecret",       # your-access-key-secret
    "us-east-1")         # your-region-id

# Build the CreateInstance request.
request = CreateInstanceRequest()
request.set_accept_format('json')
request.set_ImageId("m-rjmyImageId838")
request.set_InstanceName("myTestInstance")
request.set_SecurityGroupId("sg-rmysecgroupid")
request.set_InstanceType("ecs.sn2ne.large")
request.set_KeyPairName("myKey")
request.set_InternetChargeType("PayByTraffic")
request.set_VSwitchId("vsw-rjmyswitchid")
# A nonzero outbound bandwidth is what requests a public IP for the
# instance.
request.set_InternetMaxBandwidthOut(5)
# BUG FIX: the base64 user data had a stray space ("Zm9vOmJhcg ==").
request.set_UserData("Zm9vOmJhcg==")

response = client.do_action_with_exception(request)
# BUG FIX: the original referenced undefined names ("answer",
# "analyzed") and a nonexistent keyword ("indentation"); json.dumps
# takes "indent".
parsed = json.loads(response)
print(json.dumps(parsed, indent=3))
myInstanceId = parsed['InstanceId']
print('Instance Created: ', myInstanceId)