The laser hits a small moving target

Prototype for a mosquito zapper made with:
– laser diode + power supply from a 405 nm Blu-ray burner (DANGEROUS!!!)
– galvanometers to move the laser
– webcam
– DAC + ampli ops
– usb/spi converters
– relay to switch on/off laser
– linux computer + opencv software
I have to test with mosquitoes 🙂 (and glasses)
On the video, a small piece of black paper hanging on a nylon wire is moving.

The main python code below :

[python]
'''

O———————-> X (up to 1280)
|
|
|
| L
|
| C
|
Y
(up to 720)

L laser (Xl,Yl)
C cible (Xc,Yc)

Depx=Xc-Xl
Depy=Yc-Yl

'''

import math,sys,os,cv,time
import Queue,threading
import numpy as np
import signal

USE_LASER = True        # master switch: actually fire the laser

DETECT_VISAGE = False   # face-detection safety: never fire at a face

CAMERA = 1  # 0 = internal cam, 1 = external cam

CONF = '1280x720'  # capture resolution

# Size, colors parameters
# Expected target area in pixels^2; blobs far from this are ignored.
SIZETOKEEP = 400

# HSV of the laser spot itself, so the tracker ignores its own dot
H = 100
S = 10
V = 255

# External helper binaries: DAC-over-SPI writer, laser relay, status LED
USB2SPI = '/home/karim/perso/elec/moustiques/soft/WRITEf '
LASER = '/home/karim/perso/elec/moustiques/soft/laser '
LED = '/home/karim/perso/elec/moustiques/soft/led '

# Calibration tables (violet laser): pixel <-> DAC value pairs measured
# at the image centre and at the extremities the laser can reach.
if CONF == '1280x720':
    # laser position at the image centre
    Xl_center = 641
    Yl_center = 354
    ADCy_center = 1137
    ADCx_center = 1466

    # image extremities the laser can reach
    ### y
    Yl_maxhaut = 170       # topmost reachable row
    ADCy_maxhaut = 1
    Yl_minbas = 713        # bottommost reachable row
    ADCy_minbas = 2937

    ### x
    Xl_mingauche = 268     # leftmost reachable column
    ADCx_mingauche = 4081
    Xl_maxdroite = 853     # rightmost reachable column
    ADCx_maxdroite = 2
else:
    # laser position at the image centre
    Xl_center = 641
    Yl_center = 360
    ADCy_center = 1052
    ADCx_center = 1409

    # image extremities the laser can reach
    ### y
    Yl_maxhaut = 162
    ADCy_maxhaut = 2
    Yl_minbas = 699
    ADCy_minbas = 2835

    ### x
    Xl_mingauche = 243
    ADCx_mingauche = 4080
    Xl_maxdroite = 854
    ADCx_maxdroite = 1

# DAC steps per pixel on X
pasx = math.fabs(1.0 * (ADCx_maxdroite - ADCx_mingauche) / (Xl_maxdroite - Xl_mingauche))


def move_x(Xp):
    """Drive DAC channel 0 so the laser points at pixel column Xp."""
    # X axis is mirrored: larger pixel column -> smaller DAC value.
    ADCp = ADCx_center - (Xp - Xl_center) * pasx
    com = USB2SPI + " 0 " + str(int(ADCp))
    res = os.system(com)
    if res != 0:
        print('Can not run ' + com)
        sys.exit(1)


# DAC steps per pixel on Y
pasy = math.fabs(1.0 * (ADCy_minbas - ADCy_maxhaut) / (Yl_minbas - Yl_maxhaut))


def move_y(Yp):
    """Drive DAC channel 1 so the laser points at pixel row Yp."""
    ADCp = ADCy_center + (Yp - Yl_center) * pasy
    com = USB2SPI + " 1 " + str(int(ADCp))
    res = os.system(com)
    if res != 0:
        print('Can not run ' + com)
        sys.exit(1)

def inside(p, r):
    """Return True if point p=(px, py) lies strictly inside rect r.

    r is in (top-left, size) form: ((rx, ry), (rw, rh)) — the second
    pair is WIDTH/HEIGHT, not a bottom-right corner.
    """
    (rx, ry), (rw, rh) = r
    (px, py) = p
    # Fixed: the original print claimed "p is inside r" before the
    # containment was actually tested, which was misleading in the logs.
    print('inside? %s in %s' % (p, r))
    return px > rx and py > ry and rx + rw > px and ry + rh > py

#
# BBoxes are in the format:
# ( (topleft_x, topleft_y), (bottomright_x, bottomright_y) )
# Index aliases into that two-point tuple:
top = 0
bottom = 1
left = 0
right = 1


def merge_collided_bboxes(bbox_list):
    """Repeatedly merge overlapping bboxes until none collide.

    Mutates and returns bbox_list. Boxes are compared inflated by
    ~10% (coordinates scaled by 1.1 / 0.9) to absorb small gaps of a
    pixel or two between boxes that really belong together.
    """
    for box_a in bbox_list:
        for box_b in bbox_list:
            if box_a is box_b:
                continue  # never compare a box with itself

            # Screen coordinates: larger x is further right, larger y
            # is lower. The two boxes are disjoint iff one ends before
            # the other starts on either axis (with 10% inflation).
            disjoint = (
                box_a[bottom][0] * 1.1 < box_b[top][0] * 0.9
                or box_a[top][0] * 0.9 > box_b[bottom][0] * 1.1
                or box_a[right][1] * 1.1 < box_b[left][1] * 0.9
                or box_a[left][1] * 0.9 > box_b[right][1] * 1.1
            )

            if not disjoint:
                # Replace the colliding pair with their union, then
                # restart from scratch on the updated list.
                merged = (
                    (min(box_a[left][0], box_b[left][0]),
                     min(box_a[left][1], box_b[left][1])),
                    (max(box_a[right][0], box_b[right][0]),
                     max(box_a[right][1], box_b[right][1])),
                )
                bbox_list.remove(box_a)
                bbox_list.remove(box_b)
                bbox_list.append(merged)
                return merge_collided_bboxes(bbox_list)

    # No collisions left: done.
    return bbox_list

class WorkerDraw(threading.Thread):
    """Consumer thread: aims the galvos at queued points and pulses the laser.

    Items on the queue are dicts of the form {'point': (x, y)} in pixel
    coordinates. NOTE(review): sys.exit() inside a thread only raises
    SystemExit in this thread; it does not terminate the main process.
    """

    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue  # shared work queue fed by Target.run()

    def run(self):
        while True:
            point = self.queue.get()
            print('=========> point = %s' % (point,))

            x, y = point['point'][0], point['point'][1]

            # Aim the galvos at the target.
            move_x(x)
            move_y(y)

            # switch the laser on
            com = LASER + ' 1'
            res = os.system(com)
            if res != 0:
                print('Can not run ' + com)
                sys.exit(1)

            # Dwell time placeholder (currently zero: just yields the CPU).
            time.sleep(0)

            # switch the laser off
            com = LASER + ' 0'
            res = os.system(com)
            if res != 0:
                print('Can not run ' + com)
                sys.exit(1)

            self.queue.task_done()

class Target:
    """Webcam tracker: finds moving blobs and queues them for the laser.

    Keeps a running-average background model, extracts moving blobs by
    frame differencing, filters them by size, position and colour, and
    pushes the selected blob centre onto the WorkerDraw queue.
    Uses the legacy OpenCV 1.x `cv` Python bindings.
    """

    def __init__(self):
        self.capture = cv.CaptureFromCAM(CAMERA)

        if CONF == '1280x720':
            cv.SetCaptureProperty(self.capture, cv.CV_CAP_PROP_FRAME_WIDTH, 1280)
            cv.SetCaptureProperty(self.capture, cv.CV_CAP_PROP_FRAME_HEIGHT, 720)
            cv.SetCaptureProperty(self.capture, cv.CV_CAP_PROP_FPS, 60)
            time.sleep(2)  # let the camera apply the new mode

        cv.NamedWindow("Target", CAMERA)

    def detect_faces(self, image, haar_cascade, mem_storage):
        """Run the Haar cascade on `image`, draw green boxes, return hits.

        Each hit is ((x, y, w, h), neighbours): top-left corner plus
        width/height of the bounding box. Minimum object size is one
        tenth of the frame in each dimension.
        """
        image_size = cv.GetSize(image)
        faces = cv.HaarDetectObjects(image, haar_cascade, mem_storage, 1.2, 2,
                                     cv.CV_HAAR_DO_CANNY_PRUNING,
                                     (image_size[0] / 10, image_size[1] / 10))

        for face in faces:
            box = face[0]  # (x, y, w, h)
            cv.Rectangle(image, (box[0], box[1]),
                         (box[0] + box[2], box[1] + box[3]),
                         cv.RGB(124, 252, 0), 1, 8, 0)
        return faces

    def run(self):
        out_queue = Queue.Queue()

        if DETECT_VISAGE:
            # Face detection cascade, used as a safety interlock.
            haar_cascade = cv.Load('haarcascades/haarcascade_frontalface_default.xml')

        # Capture first frame to get size
        frame = cv.QueryFrame(self.capture)
        print('width=%s,height=%s,depth=%s' % (frame.width, frame.height, frame.depth))

        grey_image = cv.CreateImage(cv.GetSize(frame), cv.IPL_DEPTH_8U, 1)
        moving_average = cv.CreateImage(cv.GetSize(frame), cv.IPL_DEPTH_32F, 3)
        difference = None

        # Worker thread that drives the galvos and pulses the laser.
        t = WorkerDraw(out_queue)
        t.start()

        t0 = time.time()
        Process = False  # stays False during the warm-up period

        # loop over images
        while True:
            onFace = False

            # Capture frame from webcam
            color_image = cv.QueryFrame(self.capture)

            ### draw the area the laser can physically reach
            pt1 = (Xl_mingauche, Yl_maxhaut)
            pt2 = (Xl_maxdroite, Yl_minbas)
            cv.Rectangle(color_image, pt1, pt2, cv.CV_RGB(255, 0, 0), 1)

            t1 = time.time()

            #######################################
            # HSV copy, sampled later to reject laser-coloured blobs.
            hsv_img = cv.CreateImage(cv.GetSize(color_image), 8, 3)
            cv.CvtColor(color_image, hsv_img, cv.CV_BGR2HSV)

            # Smooth to get rid of false positives
            cv.Smooth(color_image, color_image, cv.CV_GAUSSIAN, 3, 0)
            if not difference:
                # First frame: initialise the background model.
                difference = cv.CloneImage(color_image)
                temp = cv.CloneImage(color_image)
                cv.ConvertScale(color_image, moving_average, 1.0, 0.0)
            else:
                cv.RunningAvg(color_image, moving_average, 0.020, None)

            # Convert the scale of the moving average.
            cv.ConvertScale(moving_average, temp, 1.0, 0.0)

            # Subtract the current frame from the moving average.
            cv.AbsDiff(color_image, temp, difference)

            # Convert the image to grayscale.
            cv.CvtColor(difference, grey_image, cv.CV_RGB2GRAY)

            # Convert the image to black and white.
            cv.Threshold(grey_image, grey_image, 70, 255, cv.CV_THRESH_BINARY)

            # Dilate and erode to get object blobs
            cv.Dilate(grey_image, grey_image, None, 18)
            cv.Erode(grey_image, grey_image, None, 10)

            # Calculate movements
            storage = cv.CreateMemStorage(0)

            if DETECT_VISAGE:
                faces = self.detect_faces(color_image, haar_cascade, storage)

            contour = cv.FindContours(grey_image, storage, cv.CV_RETR_CCOMP, cv.CV_CHAIN_APPROX_SIMPLE)
            bounding_box_list = []
            while contour:
                bounding_rect = cv.BoundingRect(list(contour))
                point1 = (bounding_rect[0], bounding_rect[1])
                point2 = (bounding_rect[0] + bounding_rect[2],
                          bounding_rect[1] + bounding_rect[3])
                bounding_box_list.append((point1, point2))
                contour = contour.h_next()

            # Find the average size of the bbox (targets), then remove
            # any tiny bboxes (likely noise). "Tiny" is any box with
            # less than 1/10th the area of the average box.
            box_areas = []
            for box in bounding_box_list:
                box_width = box[right][0] - box[left][0]
                # Bug fix: height was computed from the x coordinates
                # ([bottom][0]-[top][0]), giving area = width^2.
                box_height = box[bottom][1] - box[top][1]
                box_areas.append(box_width * box_height)

            average_box_area = 0.0
            if len(box_areas):
                average_box_area = float(sum(box_areas)) / len(box_areas)

            trimmed_box_list = []
            for box in bounding_box_list:
                box_width = box[right][0] - box[left][0]
                box_height = box[bottom][1] - box[top][1]
                area = box_width * box_height

                if area - SIZETOKEEP > 200:
                    print('ignore size')
                else:
                    print('size=%s' % (area,))

                # Keep the box if it is not noise AND is close to the
                # expected target size (SIZETOKEEP +/- 200 px^2).
                if area > average_box_area * 0.1 and math.fabs(area - SIZETOKEEP) < 200:
                    trimmed_box_list.append(box)

            bounding_box_list = merge_collided_bboxes(trimmed_box_list)

            # Inspect each merged box and decide whether to fire.
            center_points = []
            for box in bounding_box_list:
                x = int((box[0][0] + box[1][0]) / 2.0)
                y = int((box[0][1] + box[1][1]) / 2.0)
                center_point = (x, y)
                center_points.append(center_point)

                # HSV at the blob centre (ignore the laser's own spot).
                (h, s, v, _) = cv.Get2D(hsv_img, y, x)

                if DETECT_VISAGE:
                    p = (x, y)
                    for f in faces:
                        face_rect = ((f[0][0], f[0][1]), (f[0][2], f[0][3]))
                        if inside(p, face_rect):
                            print('Point inside face !!!!!!!')
                            onFace = True

                if t1 - t0 > 30 and Process == False:  # 30 s warm-up
                    Process = True
                    print('Processing !')

                if x < Xl_mingauche or x > Xl_maxdroite or y > Yl_minbas or y < Yl_maxhaut:
                    print('ignore bounds')

                if math.fabs(H - h) < 50 and math.fabs(S - s) < 50:
                    print('ignore color')

                # Fire only when: the worker is idle, not on a face,
                # warmed up, laser enabled, the colour differs from the
                # laser spot, and the point is within galvo reach.
                if (out_queue.empty() and onFace == False and Process == True
                        and USE_LASER == True
                        and (math.fabs(H - h) > 50 or math.fabs(S - s) > 50)
                        and (x > Xl_mingauche and x < Xl_maxdroite
                             and y < Yl_minbas and y > Yl_maxhaut)):
                    print('ADDED x=%s y=%s H=%s S=%s V=%s' % (x, y, h, s, v))
                    out_queue.put({'point': (x, y)})
                    cv.Rectangle(color_image, box[0], box[1], cv.CV_RGB(0, 255, 0), 1)
                    cv.Circle(color_image, center_point, 20, cv.CV_RGB(255, 255, 255), 1)
                    cv.Circle(color_image, center_point, 15, cv.CV_RGB(100, 255, 255), 1)
                    cv.Circle(color_image, center_point, 10, cv.CV_RGB(255, 255, 255), 2)
                    cv.Circle(color_image, center_point, 5, cv.CV_RGB(100, 255, 255), 3)

            estimated_target_count = len(bounding_box_list)
            print('estimated_target_count=%s' % (estimated_target_count,))

            # Display frame to user
            cv.ShowImage("Target", color_image)

            # Listen for ESC or ENTER key (the image window must have
            # focus). On quit: laser off, LED off, then SIGKILL the
            # whole process — the worker thread is blocked on the
            # queue, so a plain `break` would leave the process hung.
            c = cv.WaitKey(1) % 0x100
            if c == 27 or c == 10:
                for com in (LASER + ' 0', LED + ' 0'):
                    res = os.system(com)
                    if res != 0:
                        print('Can not run ' + com)
                os.kill(os.getpid(), signal.SIGKILL)

if __name__ == "__main__":
    # Switch the warning LED on for the whole hunting session.
    com = LED + ' 1'
    res = os.system(com)
    if res != 0:
        print('Can not run ' + com)

    # start the tracker (blocks forever; quit with ESC/ENTER in the window)
    t = Target()
    t.run()

[/python]

Add a red led to warn that the mosquitoes hunter is on

When log in :

debarm:~# cat .bashrc

echo "toto"
python /root/new/led_on.py&

debarm:~# cat /root/new/led_on.py
from ablib import Pin
import time

# Switch the warning LED (GPIO W16 on the Aria G25) on at login.
led = Pin('W16', 'OUTPUT')
led.on()

When log out :

debarm:~# cat .bash_logout
python /root/new/led_off.py

debarm:~# cat /root/new/led_off.py
from ablib import Pin
import time

# Switch the warning LED (GPIO W16 on the Aria G25) off at logout.
led = Pin('W16', 'OUTPUT')
led.off()

GPS + aria G25

GPS module
GPS module

 

20150111_150432

 

20150111_150444

 

Set date :
date -s "11 JAN 2014 13:46:00"
hwclock --systohc

Install CRON : apt-get install cron

root@acmeboard:~# cat check.sh gps.py

#!/bin/sh
# Watchdog, run from cron every 2 minutes: restart gps.py if it died.
SERVICE='gps.py'

# 'grep -v grep' removes the grep command itself from the ps listing,
# so only a real gps.py process makes the pipeline succeed.
if ps ax | grep -v grep | grep $SERVICE > /dev/null
then
echo "$SERVICE service running, everything is fine"
else
echo "$SERVICE is not running"
cd /root/
# nohup + & so the logger keeps running after this script exits.
nohup python ./$SERVICE > /dev/null &
fi

[python]
import serial

import logging
import logging.handlers

# Console logging at DEBUG level.
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
                    datefmt='%y-%m-%d %H:%M',
                    )

log = logging.getLogger('toto')

# Also write to test.log, rotated once per day at midnight.
formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
handler = logging.handlers.TimedRotatingFileHandler('test.log', 'midnight', 1)
handler.setFormatter(formatter)
log.addHandler(handler)

ser = serial.Serial('/dev/ttyS4', 9600, timeout=1)
print("connected to: " + ser.portstr)

# Log every NMEA sentence coming from the GPS module, forever.
while True:
    line = ser.readline()
    log.debug(str(line.strip()))
[/python]

root@acmeboard:~# crontab -l
*/2 * * * * /root/check.sh >> /root/toto

Convert NMEA lat, lon to decimal lat,lon :

NMEA Decimal
latitude 0302.78469 03 + (02.78469/60) = 3.046412
longitude 10141.82531 101 + (41.82531/60) = 101.6971

Follow the electrical consumption

It is made with ARIA G25 with ethernet and 1 serial port and 1 teleinfo module.

[python]

import serial
import datetime

import json
import urllib2

class SenseDevice:
    """Minimal client for the sen.se events API.

    Queue (feed_id, value) events with add_value(), then POST them all
    at once with publish().
    """

    base_url = 'http://api.sen.se/events/'

    def __init__(self, api_key):
        self.api_key = api_key
        # Bug fix: `data` was a CLASS attribute, so every SenseDevice
        # instance shared (and accumulated into) the same list. Make it
        # a per-instance attribute.
        self.data = []

    def add_value(self, feed_id, value):
        """Queue one event for the next publish()."""
        self.data.append({
            'feed_id': feed_id,
            'value': value
        })

    def publish(self):
        """POST all queued events as JSON; clear the queue on success."""
        req = urllib2.Request(self.base_url)
        req.add_header('sense_key', self.api_key)
        req.add_header('content-type', 'application/json')

        req.add_data(json.dumps(self.data).encode('utf-8'))
        res = urllib2.urlopen(req)
        self.data = []
        return res

t0 = datetime.datetime.now()

ser = serial.Serial('/dev/ttyS4', 9600, timeout=1)
print("connected to: " + ser.portstr)

a = SenseDevice('XXXXXXXXXXXX')

# Teleinfo counters, None until first seen on the serial link.
val1 = None  # HCHP: peak-hours counter
val2 = None  # HCHC: off-peak counter
while True:
    # Read a line and convert it from b'xxx\r\n' to xxx
    line = ser.readline()[:-2]
    print(line)
    if 'HCHP' in line:
        val1 = line.split()[1]

    if 'HCHC' in line:
        val2 = line.split()[1]

    t1 = datetime.datetime.now()

    # Publish both counters (in kWh) at most once a minute. The None
    # guards fix a NameError in the original when 60 s elapsed before
    # both HCHP and HCHC had been seen.
    if (t1 - t0).seconds > 60 and val1 is not None and val2 is not None:
        t0 = t1
        print('%s %s %s' % (datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), val1, val2))
        a = SenseDevice('XXXXXXXXXXXXXXXXXA')
        a.add_value(49711, float(val1) / 1000.0)
        a.add_value(54884, float(val2) / 1000.0)
        res = a.publish()
        print(res)

# NOTE(review): unreachable after `while True`, kept from the original.
ser.close()

[/python]

Recompile kernel for Aria G25 to have the DLINK WIFI USB dongle working

Update with kernel 3.16.1

On my laptop :

make ARCH=arm CROSS_COMPILE=arm-linux-gnueabi- acme-aria_defconfig

make ARCH=arm menuconfig

make ARCH=arm CROSS_COMPILE=arm-linux-gnueabi- acme-aria.dtb
make -j8 ARCH=arm CROSS_COMPILE=arm-linux-gnueabi- uImage
make modules -j8 ARCH=arm CROSS_COMPILE=arm-linux-gnueabi-
make modules_install INSTALL_MOD_PATH=./modules ARCH=arm

scp arch/arm/boot/dts/acme-aria.dtb root@192.168.1.41:/boot/at91-ariag25.dtb

scp arch/arm/boot/uImage root@192.168.1.41:/boot/image.bin

I had to install rsync on the Aria G25 board (apt-get install rsync):
rsync -avc modules/lib/ root@192.168.1.41:/lib/

Then reboot the acme card

once rebooted, do

depmod -a

apt-get install firmware-realtek
echo 2001 330d > /sys/bus/usb/drivers/rtl8192cu/new_id

I see with dmesg :

rtl8192cu: Chip version 0x11
rtl8192cu: MAC address: f8:e9:03:d5:03:8a
rtl8192cu: Board Type 0
rtl_usb: rx_max_size 15360, rx_urb_num 8, in_ep 1
rtl8192cu: Loading firmware rtlwifi/rtl8192cufw_TMSC.bin
usb 2-1: Direct firmware load failed with error -2
usb 2-1: Falling back to user helper
rtlwifi: Loading alternative firmware rtlwifi/rtl8192cufw.bin
ieee80211 phy0: Selected rate control algorithm ‘rtl_rc’
rtlwifi: wireless switch is on

root@acmeboard:~# cat /proc/net/wireless
Inter-| sta-| Quality | Discarded packets | Missed | WE
face | tus | link level noise | nwid crypt frag retry misc | beacon | 22
wlan0: 0000 0 0 0 0 0 0 0 0 0

apt-get install wireless-tools

and process as below to set the /etc/network/interfaces

then

/etc/init.d/networking restart

To have this ok at boot, add the 2 lines below in /etc/rc.local

echo 2001 330d > /sys/bus/usb/drivers/rtl8192cu/new_id

/etc/init.d/networking restart

————————————————————–

Following instructions from :

http://www.acmesystems.it/dlink_wifi

Add a USB connector to the board : http://www.acmesystems.it/ariag25_wirings

Recompile the kernel 2.6.39 (http://www.acmesystems.it/ariag25_compile_linux_2_6_39)

In linux-2.6.39/drivers/net/wireless/rtlwifi/rtl8192cu/sw.c, you have to add a line for your dongle :

/****** 8192CU ********/
{RTL_USB_DEVICE(0x0586, 0x341f, rtl92cu_hal_cfg)}, /*Zyxel -Abocom*/
{RTL_USB_DEVICE(0x07aa, 0x0056, rtl92cu_hal_cfg)}, /*ATKK-Gemtek*/
{RTL_USB_DEVICE(0x07b8, 0x8178, rtl92cu_hal_cfg)}, /*Funai -Abocom*/
{RTL_USB_DEVICE(0x07b8, 0x8178, rtl92cu_hal_cfg)}, /*Abocom -Abocom*/
{RTL_USB_DEVICE(0x2001, 0x3307, rtl92cu_hal_cfg)}, /*D-Link-Cameo*/
{RTL_USB_DEVICE(0x2001, 0x3309, rtl92cu_hal_cfg)}, /*D-Link-Alpha*/
{RTL_USB_DEVICE(0x2001, 0x330a, rtl92cu_hal_cfg)}, /*D-Link-Alpha*/
{RTL_USB_DEVICE(0x2001, 0x330d, rtl92cu_hal_cfg)}, /*D-Link-Alpha*/
{RTL_USB_DEVICE(0x2019, 0xab2b, rtl92cu_hal_cfg)}, /*Planex -Abocom*/
{RTL_USB_DEVICE(0x7392, 0x7822, rtl92cu_hal_cfg)}, /*Edimax -Edimax*/
{}

You need rtl8192cufw.bin  in  /lib/firmware/rtlwifi/ (it is not installed when you install the package firmware-realtek)

In /etc/network/interfaces (WEP key) :


auto wlan0
iface wlan0 inet dhcp
wireless-essid Freebox-XXXX
wireless-key XXXXXX

The Wifi USB dongle must not be too far from the access point.

You got :

...
sb 2-1: new full speed USB device number 2 using at91_ohci
usb 2-1: not running at top speed; connect to a high speed hub
usb 2-1: New USB device found, idVendor=2001, idProduct=330d
usb 2-1: New USB device strings: Mfr=1, Product=2, SerialNumber=3
usb 2-1: Product: 802.11n WLAN Adapter
usb 2-1: Manufacturer: Realtek
usb 2-1: SerialNumber: 00e04c000001
rtl8192cu: MAC address: f8:e9:03:d5:03:8a
rtl8192cu: Board Type 0
rtl8192cu: rx_max_size 15360, rx_urb_num 8, in_ep 1
ieee80211 phy0: Selected rate control algorithm 'rtl_rc'
rtl8192cu 2-1:1.0: wlan0: Features changed: 0x00004800 -> 0x00004000
....
rtl8192cu: MAC auto ON okay!
rtl8192cu: Tx queue select: 0x05
rtl8192cu: Loading firmware file rtlwifi/rtl8192cufw.bin
wlan0: authenticate with f4:ca:e5:ab:22:2c (try 1)
wlan0: authenticated
wlan0: associate with f4:ca:e5:ab:22:2c (try 1)
wlan0: RX AssocResp from f4:ca:e5:ab:22:2c (capab=0x411 status=0 aid=3)
wlan0: associated
...

20141230_004649
Electronic circuit to follow my electrical consumption and publish the results on the net.

ARIA G25 board

The card comes from ACME.

I have wired the following components :

  • ethernet
  • usb port
  • microsd (for the OS Linux)
  • A GPIO output (for the relay)
  • the SPI bus (for the MCP4822)

Use a DAC MCP4822 with the ARIA G25

On the ARIA G25, I use the following C code to control the MCP4822 using the SPI interface :

#include <stdint.h>
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
#include <getopt.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <linux/types.h>
#include <linux/spi/spidev.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

/* Print an errno-based error message and abort. */
static void pabort(const char *s)
{
	perror(s);
	abort();
}

/* SPI link parameters for the MCP4822 DAC. */
static const char *device = "/dev/spidev0.0";
static uint8_t mode = 3;         /* SPI mode 3 */
static uint8_t bits = 8;         /* bits per word */
static uint32_t speed = 1000000; /* 1 MHz clock */
static uint16_t delay;           /* inter-transfer delay (usecs), 0 */

/* Debug helper: print the low 16 bits of n, least-significant first. */
void affichebin16(unsigned n)
{
	unsigned bit = 0;
	unsigned mask = 1;
	int i;
	for (i = 0; i < 16; ++i)
	{
		bit = (n & mask) >> i;
		printf("%d", bit);
		mask <<= 1;
	}
	printf("\n");
}

/* Debug helper: print the low 8 bits of n, least-significant first. */
void affichebin8(unsigned n)
{
	unsigned bit = 0;
	unsigned mask = 1;
	int i;
	for (i = 0; i < 8; ++i)
	{
		bit = (n & mask) >> i;
		printf("%d", bit);
		mask <<= 1;
	}
	printf("\n");
}

/*
 * Send one 16-bit command word to the MCP4822 over SPI.
 * sortie selects the DAC output (1 -> command 0b0011...., else 0b1011....,
 * i.e. channel select in bit 15, gain/SHDN bits per the MCP4822 datasheet);
 * val is the 12-bit value to convert.
 */
static void transfer(int fd, int sortie, int val)
{
	int ret;

	affichebin16(val);

	uint16_t A;
	if (sortie == 1)
		A = (val & 0b0000111111111111) | 0b0011000000000000;
	else
		A = (val & 0b0000111111111111) | 0b1011000000000000;
	affichebin16(A);

	uint8_t SH = (A & 0xFF00) >> 8; /* MSB (bits 9 to 16) */
	uint8_t SL = A & 0x00FF;        /* LSB (bits 1 to 8) */

	printf("bits 1 to 8 =\n");
	affichebin8(SL);
	printf("bits 9 to 16 =\n");
	affichebin8(SH);

	uint8_t tx[] = {
		SH, SL
	};

	uint8_t rx[ARRAY_SIZE(tx)] = {0, };

	struct spi_ioc_transfer tr = {
		.tx_buf = (unsigned long)tx,
		.rx_buf = (unsigned long)rx,
		.len = ARRAY_SIZE(tx),
		.delay_usecs = delay,
		.speed_hz = 0,      /* 0: keep the speed set via ioctl */
		.bits_per_word = 0, /* 0: keep the word size set via ioctl */
	};

	ret = ioctl(fd, SPI_IOC_MESSAGE(1), &tr);
	/*
	 * Bug fix: SPI_IOC_MESSAGE returns the number of bytes transferred
	 * (2 here) on success and -1 on error. The original tested
	 * `ret == 1`, which aborted on nothing and let real errors pass.
	 */
	if (ret < 1)
		pabort("can't send spi message");

	/* Dump whatever came back on MISO. */
	for (ret = 0; ret < ARRAY_SIZE(tx); ret++) {
		if (!(ret % 6))
			puts("");
		printf("%.2X ", rx[ret]);
	}
	puts("");
}

/* Usage: prog <output 0|1> <12-bit value> */
int main(int argc, char *argv[])
{
	int ret = 0;
	int fd;

	/* Bug fix: the original dereferenced argv[1]/argv[2] unchecked. */
	if (argc < 3) {
		fprintf(stderr, "usage: %s <output 0|1> <value>\n", argv[0]);
		exit(1);
	}

	int val = atoi(argv[2]);
	int sortie = atoi(argv[1]);

	printf("val=%d\tsortie=%d\n", val, sortie);

	fd = open(device, O_RDWR);
	if (fd < 0)
		pabort("can't open device");

	/* Configure the SPI link, reading each setting back as a check. */
	ret = ioctl(fd, SPI_IOC_WR_MODE, &mode);
	if (ret == -1)
		pabort("can't set spi mode");

	ret = ioctl(fd, SPI_IOC_RD_MODE, &mode);
	if (ret == -1)
		pabort("can't get spi mode");

	ret = ioctl(fd, SPI_IOC_WR_BITS_PER_WORD, &bits);
	if (ret == -1)
		pabort("can't set bits per word");

	ret = ioctl(fd, SPI_IOC_RD_BITS_PER_WORD, &bits);
	if (ret == -1)
		pabort("can't get bits per word");

	ret = ioctl(fd, SPI_IOC_WR_MAX_SPEED_HZ, &speed);
	if (ret == -1)
		pabort("can't set max speed hz");

	ret = ioctl(fd, SPI_IOC_RD_MAX_SPEED_HZ, &speed);
	if (ret == -1)
		pabort("can't get max speed hz");

	printf("spi mode: %d\n", mode);
	printf("bits per word: %d\n", bits);
	printf("max speed: %d Hz (%d KHz)\n", speed, speed / 1000);

	transfer(fd, sortie, val);

	close(fd);

	return ret;
}

Once compiled the parameters are the output (0 or 1) and the byte to convert.