|
Lines 697-702
struct dma_filter {
Link Here
|
| 697 |
* paused. Returns 0 or an error code |
697 |
* paused. Returns 0 or an error code |
| 698 |
* @device_terminate_all: Aborts all transfers on a channel. Returns 0 |
698 |
* @device_terminate_all: Aborts all transfers on a channel. Returns 0 |
| 699 |
* or an error code |
699 |
* or an error code |
|
|
700 |
* @device_synchronize: Synchronizes the termination of a transfer to the |
| 701 |
* current context. |
| 700 |
* @device_tx_status: poll for transaction completion, the optional |
702 |
* @device_tx_status: poll for transaction completion, the optional |
| 701 |
* txstate parameter can be supplied with a pointer to get a |
703 |
* txstate parameter can be supplied with a pointer to get a |
| 702 |
* struct with auxiliary transfer status information, otherwise the call |
704 |
* struct with auxiliary transfer status information, otherwise the call |
|
Lines 781-786
struct dma_device {
Link Here
|
| 781 |
int (*device_pause)(struct dma_chan *chan); |
783 |
int (*device_pause)(struct dma_chan *chan); |
| 782 |
int (*device_resume)(struct dma_chan *chan); |
784 |
int (*device_resume)(struct dma_chan *chan); |
| 783 |
int (*device_terminate_all)(struct dma_chan *chan); |
785 |
int (*device_terminate_all)(struct dma_chan *chan); |
|
|
786 |
void (*device_synchronize)(struct dma_chan *chan); |
| 784 |
|
787 |
|
| 785 |
enum dma_status (*device_tx_status)(struct dma_chan *chan, |
788 |
enum dma_status (*device_tx_status)(struct dma_chan *chan, |
| 786 |
dma_cookie_t cookie, |
789 |
dma_cookie_t cookie, |
|
Lines 872-877
static inline struct dma_async_tx_descriptor *dmaengine_prep_dma_sg(
Link Here
|
| 872 |
src_sg, src_nents, flags); |
875 |
src_sg, src_nents, flags); |
| 873 |
} |
876 |
} |
| 874 |
|
877 |
|
|
|
878 |
/** |
| 879 |
* dmaengine_terminate_all() - Terminate all active DMA transfers |
| 880 |
* @chan: The channel for which to terminate the transfers |
| 881 |
* |
| 882 |
* This function is DEPRECATED use either dmaengine_terminate_sync() or |
| 883 |
* dmaengine_terminate_async() instead. |
| 884 |
*/ |
| 875 |
static inline int dmaengine_terminate_all(struct dma_chan *chan) |
885 |
static inline int dmaengine_terminate_all(struct dma_chan *chan) |
| 876 |
{ |
886 |
{ |
| 877 |
if (chan->device->device_terminate_all) |
887 |
if (chan->device->device_terminate_all) |
|
Lines 880-885
static inline int dmaengine_terminate_all(struct dma_chan *chan)
Link Here
|
| 880 |
return -ENOSYS; |
890 |
return -ENOSYS; |
| 881 |
} |
891 |
} |
| 882 |
|
892 |
|
|
|
893 |
/** |
| 894 |
* dmaengine_terminate_async() - Terminate all active DMA transfers |
| 895 |
* @chan: The channel for which to terminate the transfers |
| 896 |
* |
| 897 |
* Calling this function will terminate all active and pending descriptors |
| 898 |
* that have previously been submitted to the channel. It is not guaranteed |
| 899 |
* though that the transfer for the active descriptor has stopped when the |
| 900 |
* function returns. Furthermore it is possible the complete callback of a |
| 901 |
* submitted transfer is still running when this function returns. |
| 902 |
* |
| 903 |
* dmaengine_synchronize() needs to be called before it is safe to free |
| 904 |
* any memory that is accessed by previously submitted descriptors or before |
| 905 |
* freeing any resources accessed from within the completion callback of any |
| 906 |
* perviously submitted descriptors. |
| 907 |
* |
| 908 |
* This function can be called from atomic context as well as from within a |
| 909 |
* complete callback of a descriptor submitted on the same channel. |
| 910 |
* |
| 911 |
* If none of the two conditions above apply consider using |
| 912 |
* dmaengine_terminate_sync() instead. |
| 913 |
*/ |
| 914 |
static inline int dmaengine_terminate_async(struct dma_chan *chan) |
| 915 |
{ |
| 916 |
if (chan->device->device_terminate_all) |
| 917 |
return chan->device->device_terminate_all(chan); |
| 918 |
|
| 919 |
return -EINVAL; |
| 920 |
} |
| 921 |
|
| 922 |
/** |
| 923 |
* dmaengine_synchronize() - Synchronize DMA channel termination |
| 924 |
* @chan: The channel to synchronize |
| 925 |
* |
| 926 |
* Synchronizes to the DMA channel termination to the current context. When this |
| 927 |
* function returns it is guaranteed that all transfers for previously issued |
| 928 |
* descriptors have stopped and and it is safe to free the memory assoicated |
| 929 |
* with them. Furthermore it is guaranteed that all complete callback functions |
| 930 |
* for a previously submitted descriptor have finished running and it is safe to |
| 931 |
* free resources accessed from within the complete callbacks. |
| 932 |
* |
| 933 |
* The behavior of this function is undefined if dma_async_issue_pending() has |
| 934 |
* been called between dmaengine_terminate_async() and this function. |
| 935 |
* |
| 936 |
* This function must only be called from non-atomic context and must not be |
| 937 |
* called from within a complete callback of a descriptor submitted on the same |
| 938 |
* channel. |
| 939 |
*/ |
| 940 |
static inline void dmaengine_synchronize(struct dma_chan *chan) |
| 941 |
{ |
| 942 |
if (chan->device->device_synchronize) |
| 943 |
chan->device->device_synchronize(chan); |
| 944 |
} |
| 945 |
|
/**
 * dmaengine_terminate_sync() - Terminate all active DMA transfers
 * @chan: The channel for which to terminate the transfers
 *
 * Calling this function will terminate all active and pending transfers
 * that have previously been submitted to the channel. It is similar to
 * dmaengine_terminate_async() but guarantees that the DMA transfer has actually
 * stopped and that all complete callbacks have finished running when the
 * function returns.
 *
 * This function must only be called from non-atomic context and must not be
 * called from within a complete callback of a descriptor submitted on the same
 * channel.
 */
static inline int dmaengine_terminate_sync(struct dma_chan *chan)
{
	/* First request the termination... */
	int err = dmaengine_terminate_async(chan);

	if (err)
		return err;

	/* ...then wait until it has fully taken effect. */
	dmaengine_synchronize(chan);

	return 0;
}
| 972 |
|
| 883 |
static inline int dmaengine_pause(struct dma_chan *chan) |
973 |
static inline int dmaengine_pause(struct dma_chan *chan) |
| 884 |
{ |
974 |
{ |
| 885 |
if (chan->device->device_pause) |
975 |
if (chan->device->device_pause) |
| 886 |
- |
|
|