import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection on older matplotlib

# A vector can be created from a plain Python list (or a numpy array).
v2 = [3, -2]     # a 2-dimensional vector
v3 = [4, -3, 2]  # a 3-dimensional vector
# Transposing. NOTE: np.transpose on a 1-D sequence returns it unchanged —
# a 1-D array has no second axis to swap. (Numpy arrays also expose v3.T.)
v3t = np.transpose(v3)
print(v3t)

# Draw v2 as a line segment from the origin to the point (3, -2):
# first argument = x-coordinates [0, v2[0]] = [0, 3],
# second argument = y-coordinates [0, v2[1]] = [0, -2].
plt.plot([0, v2[0]], [0, v2[1]])

""" === remaining things to make the graph look pretty === """
plt.axis('equal')                 # equal scaling on the x and y axes
plt.plot([-4, 4], [0, 0], 'k--')  # x-axis from -4 to 4 ('k' = black, '--' = dashed)
plt.plot([0, 0], [-4, 4], 'k--')  # y-axis from -4 to 4
plt.grid()
plt.axis((-4, 4, -4, 4))          # (xmin, xmax, ymin, ymax)
plt.show()
# Plot the 3-D vector v3 as a line from the origin to (v3[0], v3[1], v3[2]).
fig = plt.figure(figsize=plt.figaspect(1))
ax = fig.add_subplot(111, projection='3d')
ax.plot([0, v3[0]], [0, v3[1]], [0, v3[2]], linewidth=3)

# Dashed black coordinate axes through the origin, each spanning -4..4.
ax.plot([0, 0], [0, 0], [-4, 4], 'k--')  # z-axis
ax.plot([0, 0], [-4, 4], [0, 0], 'k--')  # y-axis
ax.plot([-4, 4], [0, 0], [0, 0], 'k--')  # x-axis
# Vector-scalar multiplication: scaling v1 by a negative scalar both shrinks
# it and flips its direction.
v1 = np.array([-3, 1])
scalar = -0.3  # renamed from 'l', which is easily confused with '1'
v2 = v1 * scalar

plt.plot([0, v1[0]], [0, v1[1]], 'b', label='v1')
plt.plot([0, v2[0]], [0, v2[1]], 'r', label='v2')
plt.axis('square')

# Axis limit: 1.5x the largest component *magnitude* across both vectors.
# BUG FIX: the original used abs(max(v)), which takes the signed maximum first
# and so ignores large negative components (v1 contains -3, giving axlim = 1.5
# and clipping the plot). max(np.abs(v)) considers every component's magnitude.
axlim = max(np.max(np.abs(v1)), np.max(np.abs(v2))) * 1.5
print(axlim)
plt.axis((-axlim, axlim, -axlim, axlim))
plt.grid()
plt.legend();
# Angle between two 3-D vectors from the dot-product identity:
#   cos(theta) = (v1 . v2) / (||v1|| * ||v2||)
v1 = [2, 4, -3]
v2 = [0, -3, -3]
dot_prod = np.dot(v1, v2)
v1_mag = np.linalg.norm(v1)
v2_mag = np.linalg.norm(v2)
theta = np.arccos(dot_prod / (v1_mag * v2_mag))
# Here dot_prod = -3, so theta ~ 1.70 rad (~97.5 degrees) — slightly obtuse.
print(f"theta: {theta} radians")

# Plot both vectors in 3-D, each drawn from the origin.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot([0, v1[0]], [0, v1[1]], [0, v1[2]], linewidth=3)
ax.plot([0, v2[0]], [0, v2[1]], [0, v2[2]], linewidth=3);
plt.axis((-6, 6, -6, 6))  # limits only the x and y axes
plt.show()
Demonstrating the Cauchy–Schwarz inequality numerically
For linearly dependent vectors (one is a scalar multiple of the other):
Equality: $|a^{T} b| = \|a\| \, \|b\|$
For linearly independent vectors (neither is a scalar multiple of the other):
Strict inequality: $|a^{T} b| < \|a\| \, \|b\|$
# Numerical demonstration of the Cauchy-Schwarz inequality.
a = np.random.randn(5)
b = np.random.randn(5)  # two random vectors are (almost surely) linearly independent
c = np.random.randn(1) * a  # a scalar times a -> c and a are linearly dependent

dot = np.dot(a, b)
mag_prod = np.linalg.norm(a) * np.linalg.norm(b)

# Independent set: strict inequality |a^T b| < ||a|| ||b||, so this prints False.
# BUG FIX: the original labelled |a^T b| as "RHS" and ||a|| ||b|| as "LHS" —
# the labels were swapped relative to |a^T b| <= ||a|| ||b||.
print(np.abs(dot) == mag_prod)
print(f"LHS:{np.abs(dot)}")
print(f"RHS:{mag_prod}")

# Dependent set: equality |a^T c| = ||a|| ||c||.
dot2 = np.dot(a, c)
mag_prod2 = np.linalg.norm(a) * np.linalg.norm(c)
print(np.round(np.abs(dot2), 4) == np.round(mag_prod2, 4))  # round to absorb float error
print(f"LHS:{np.abs(dot2)}")
print(f"RHS:{mag_prod2}")

"""Sample results"""
# False
# LHS:0.18071521267290253
# RHS:2.112102692817962
# True
# LHS:1.4376900820291676
# RHS:1.4376900820291676