
Improving Millikan oil-drop experiment data processing with Python


Approximate greatest common divisor

Every droplet charge measured in the Millikan experiment should be an integer multiple of the elementary charge e, so an approximate greatest common divisor of the noisy measurements recovers e. The helper accuracy() treats two values as equal when they differ by less than the tolerance ac:

import math

# Tolerance for treating two floating-point values as equal
# (units of 1e-19 C throughout).
ac = 1.34

def accuracy(x, y):
    return math.fabs(x - y) < ac

def gcd(x, y):
    # Euclidean algorithm with an approximate zero test, so it
    # tolerates measurement noise.
    if accuracy(x, 0):
        return y
    if accuracy(y, 0):
        return x
    return gcd(y, x % y)

# Test datasets: a holds exact integer multiples of the elementary
# charge, b holds exact multiples of 1.6, c and d are noisy measured
# charges, and f contains an outlier (100).
a = [1.60217663410, 2*1.60217663410, 3*1.60217663410,
     4*1.60217663410, 5*1.60217663410, 6*1.60217663410]
b = [3.2, 3.2, 6.4, 4.8, 8.0, 9.6]
c = [7.88, 6.25, 3.11, 3.18, 4.57, 6.29]
d = [9.36, 3.16, 3.41, 10.78, 4.90, 3.11]
f = [2*1.60217663410, 100, 3*1.60217663410,
     4*1.60217663410, 5*1.60217663410, 6*1.60217663410]

e = 2 * 1.60217663410  # initial guess for the common divisor
for _ in range(10):
    # Fold the approximate GCD over every measurement.
    for q in a:
        e = gcd(e, q)
    # Keep only measurements whose remainder modulo e stays below the
    # threshold; a list comprehension avoids deleting from the list
    # while indexing into it, which can raise an IndexError.
    a = [q for q in a if q % e <= 10]
print(e)
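
The same routine can be pointed at the noisy dataset c as a quick sanity check; a minimal sketch (the recovered divisor depends on the tolerance ac, and for this data it settles around 1.47 rather than exactly 1.60):

# Rerun the approximate GCD on the noisy measurements in c.
e_noisy = 2 * 1.60217663410
for _ in range(10):
    for q in c:
        e_noisy = gcd(e_noisy, q)
print(e_noisy)  # roughly 1.47 for this data and tolerance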

Gradient-descent fit: the "reverse verification" method

This method runs the check in the opposite direction: each measured charge is first assigned the integer number of elementary charges it is closest to, and a straight line is then fitted through the (count, charge) pairs by gradient descent. If the data are good, the fitted slope reproduces the elementary charge e.
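
The integer column x used in the fit below is the assumed number of elementary charges on each droplet. It can be reproduced by dividing each measured charge by the accepted value of e and rounding to the nearest integer; a minimal sketch (e0 is my name for the accepted value, in units of 1e-19 C):

import numpy as np

e0 = 1.60217663410                                  # accepted e
q = np.array([7.88, 6.25, 3.11, 3.18, 4.57, 6.29])  # measured charges
n = np.rint(q / e0).astype(int)                     # nearest integer multiple
print(n)                                            # [5 4 2 2 3 4]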

import numpy as np
import matplotlib.pyplot as plt

N = 6
# x: assumed number of elementary charges on each droplet;
# y: the corresponding measured charge (units of 1e-19 C).
x = np.array([5, 4, 2, 2, 3, 4]).reshape(N, 1)
y = np.array([7.88, 6.25, 3.11, 3.18, 4.57, 6.29]).reshape(N, 1)
plt.scatter(x, y)
ones = np.ones((N, 1))
x = np.hstack((x, ones))  # design matrix for the line y = e*x + b

def CostGradient(theta, x, y):
    # Gradient of the mean-squared-error cost (1/(2N))*||x@theta - y||^2.
    diff = np.dot(x, theta) - y
    gradient = (1./N) * np.dot(np.transpose(x), diff)
    return gradient

def Iteration():
    alpha = 0.001                              # learning rate
    theta = np.array([1.6, 0.]).reshape(2, 1)  # start near the expected e
    epsilon = 0.001                            # stopping tolerance
    gradient = CostGradient(theta, x, y)
    while np.linalg.norm(gradient) > epsilon:
        theta = theta - alpha * gradient
        gradient = CostGradient(theta, x, y)
    return theta

Right_theta = Iteration()
print('e after gradient descent:')
print('e =', Right_theta[0][0])
print('Fitted line:')
print('y =', Right_theta[0][0], '* x +', Right_theta[1][0])

x1 = np.linspace(1, 10, 100)
y1 = Right_theta[0][0]*x1 + Right_theta[1][0]
plt.plot(x1, y1)
plt.show()
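
For comparison, the same line can be obtained in closed form with NumPy's least-squares solver, which should closely agree with the gradient-descent result; a minimal sketch reusing the design matrix x and the targets y from above:

# Closed-form ordinary least squares on the same data.
theta_ls, *_ = np.linalg.lstsq(x, y, rcond=None)
print('closed-form e =', theta_ls[0][0])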